sayakpaul (HF Staff) committed · verified
Commit bbf66ad · 1 parent: 254fdbd

Update prompt_expander.py

Files changed (1):
  prompt_expander.py (+7 -3)
prompt_expander.py CHANGED
@@ -14,7 +14,8 @@ SYSTEM_PROMPT = (
     "Take the user's short description and expand it into a vivid, detailed, and clear image generation prompt. "
     "Ensure rich colors, depth, realistic lighting, and an imaginative composition. "
     "Avoid vague terms — be specific about style, perspective, and mood. "
-    "Try to keep the output under 512 tokens."
+    "Try to keep the output under 512 tokens. "
+    "Please don't return any prefix or suffix tokens, just the expanded user description."
 )

 class GeminiPromptExpander(ModularPipelineBlocks):
@@ -64,8 +65,11 @@ class GeminiPromptExpander(ModularPipelineBlocks):
     def __call__(self, components, state: PipelineState) -> PipelineState:
         block_state = self.get_block_state(state)

-        block_state.old_prompt = block_state.prompt
-        block_state.prompt = self.model.generate_content(block_state.old_prompt).text
+        old_prompt = block_state.prompt
+        # print(f"Actual prompt: {old_prompt}")
+        block_state.prompt = self.model.generate_content(old_prompt).text
+        block_state.old_prompt = old_prompt
+        # print(f"{block_state.prompt=}")
         self.set_block_state(state, block_state)

         return components, state
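
For context, the snippet below is a minimal, self-contained sketch of what the rewritten __call__ path does, using the google-generativeai SDK directly rather than the surrounding ModularPipelineBlocks machinery. The model name ("gemini-1.5-flash"), the genai.GenerativeModel setup, and the expand_prompt helper are illustrative assumptions and not part of this commit; only the system prompt text and the expand-then-keep-the-original-prompt behavior come from the diff above.

# Minimal sketch of the prompt-expansion step in isolation (not part of this commit).
# Assumes the google-generativeai SDK and an API key in GOOGLE_API_KEY; the model
# name and the expand_prompt helper are illustrative, not taken from prompt_expander.py.
import os

import google.generativeai as genai

SYSTEM_PROMPT = (
    "Take the user's short description and expand it into a vivid, detailed, and clear image generation prompt. "
    "Ensure rich colors, depth, realistic lighting, and an imaginative composition. "
    "Avoid vague terms — be specific about style, perspective, and mood. "
    "Try to keep the output under 512 tokens. "
    "Please don't return any prefix or suffix tokens, just the expanded user description."
)

genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
model = genai.GenerativeModel("gemini-1.5-flash", system_instruction=SYSTEM_PROMPT)


def expand_prompt(prompt: str) -> tuple[str, str]:
    """Return (expanded_prompt, old_prompt), mirroring what the block writes back to state."""
    old_prompt = prompt
    expanded = model.generate_content(old_prompt).text
    return expanded, old_prompt


if __name__ == "__main__":
    expanded, original = expand_prompt("a cat floating in space")
    print(f"{original=}")
    print(f"{expanded=}")

Inside the actual block, the expanded text replaces block_state.prompt while the user's original text is preserved on block_state.old_prompt, so downstream blocks can still read both values from the pipeline state.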