Ephemeral182 committed on
Commit
07ec0a5
·
verified ·
1 Parent(s): 8136fd1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -73
app.py CHANGED
@@ -179,28 +179,28 @@ def generate_poster(
179
  seed_input,
180
  progress=gr.Progress(track_tqdm=True),
181
  ):
182
- """使用预加载的模型生成海报"""
183
  if not original_prompt or not original_prompt.strip():
184
- return None, "โŒ ๆ็คบ่ฏไธ่ƒฝไธบ็ฉบ๏ผ", ""
185
 
186
  try:
187
  if not HF_TOKEN:
188
- return None, "โŒ ้”™่ฏฏ๏ผšๆœชๆ‰พๅˆฐ HF_TOKEN๏ผŒ่ฏท้…็ฝฎ่ฎค่ฏใ€‚", ""
189
 
190
- progress(0.1, desc="开始生成...")
191
 
192
- # 确定最终提示词
193
  final_prompt = original_prompt
194
  if enable_recap:
195
- progress(0.2, desc="增强提示词...")
196
  final_prompt = enhance_prompt_with_qwen(original_prompt)
197
 
198
- # 确定种子
199
  actual_seed = int(seed_input) if seed_input and seed_input != -1 else random.randint(1, 2**32 - 1)
200
 
201
- progress(0.3, desc="生成图像...")
202
 
203
- # 使用预加载的 FLUX 管道生成图像
204
  generator = torch.Generator("cuda").manual_seed(actual_seed)
205
 
206
  with torch.inference_mode():
@@ -213,99 +213,62 @@ def generate_poster(
213
  height=int(height)
214
  ).images[0]
215
 
216
- progress(1.0, desc="完成！")
217
- status_log = f"✅ 生成完成！种子：{actual_seed}"
218
  return image, status_log, final_prompt
219
 
220
  except Exception as e:
221
- logging.error(f"生成失败：{e}")
222
- return None, f"โŒ ็”Ÿๆˆๅคฑ่ดฅ๏ผš{str(e)}", ""
223
 
224
  # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
225
- # Gradio 界面
226
  # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
227
  def create_interface():
228
- """创建 Gradio 界面"""
229
 
230
- with gr.Blocks(
231
- title="PosterCraft-v1.0",
232
- theme=gr.themes.Soft(),
233
- css="""
234
- .main-container { max-width: 1200px; margin: 0 auto; }
235
- .status-box { padding: 10px; border-radius: 5px; margin: 10px 0; }
236
- """
237
- ) as demo:
238
-
239
- gr.HTML("""
240
- <div class="main-container">
241
- <h1 style="text-align: center; margin-bottom: 20px;">🎨 PosterCraft-v1.0</h1>
242
- <p style="text-align: center; color: #666; margin-bottom: 30px;">
243
- 专业海报生成工具，基于 FLUX.1-dev 和定制微调权重
244
- </p>
245
- </div>
246
- """)
247
-
248
- with gr.Row():
249
- gr.Markdown(f"**ๅŸบ็ก€ๆจกๅž‹๏ผš** `{DEFAULT_PIPELINE_PATH}`")
250
- gr.Markdown(f"**่ฎค่ฏ็Šถๆ€๏ผš** {auth_status}")
251
-
252
- gr.HTML("""
253
- <div class="status-box">
254
- <p><strong>⚡ 首次生成需要加载模型（5-10分钟），后续生成会非常快！</strong></p>
255
- </div>
256
- """)
257
 
258
  with gr.Row():
259
  with gr.Column(scale=1):
260
- gr.Markdown("### 1. 配置")
261
  prompt_input = gr.Textbox(
262
- label="ๆตทๆŠฅๆ็คบ่ฏ",
263
  lines=3,
264
- placeholder="่พ“ๅ…ฅๆ‚จ็š„ๆตทๆŠฅๆ่ฟฐ...",
265
- value="ๅคๅค็ง‘ๅนป็”ตๅฝฑๆตทๆŠฅ๏ผŒ้œ“่™น่‰ฒๅฝฉๅ’Œ้ฃž่กŒๆฑฝ่ฝฆ"
266
  )
267
  enable_recap_checkbox = gr.Checkbox(
268
- label="ๅฏ็”จๆ็คบ่ฏๅขžๅผบ (Qwen3-8B)",
269
  value=True,
270
- info="ไฝฟ็”จ AI ๅขžๅผบๅ’Œๆ‰ฉๅฑ•ๆ‚จ็š„ๆ็คบ่ฏ"
271
  )
272
 
273
  with gr.Row():
274
- # ้ป˜่ฎคๅฎฝๅบฆ 832๏ผŒ้ซ˜ๅบฆ 1216
275
- width_input = gr.Slider(label="ๅฎฝๅบฆ", minimum=256, maximum=MAX_IMAGE_SIZE, value=832, step=32)
276
- height_input = gr.Slider(label="้ซ˜ๅบฆ", minimum=256, maximum=MAX_IMAGE_SIZE, value=1216, step=32)
277
 
278
- # ้ป˜่ฎคๆญฅๆ•ฐ 28
279
- num_inference_steps_input = gr.Slider(label="ๆŽจ็†ๆญฅๆ•ฐ", minimum=1, maximum=100, value=28, step=1)
280
- # ้ป˜่ฎค CFG 28๏ผŒไธบไบ†่ƒฝๅ–ๅˆฐ 28๏ผŒๅฐ†ๆœ€ๅคงๅ€ผๆ้ซ˜ๅˆฐ 40
281
- guidance_scale_input = gr.Slider(label="ๅผ•ๅฏผๅผบๅบฆ", minimum=0.0, maximum=40.0, value=28, step=0.1)
282
- seed_number_input = gr.Number(label="็งๅญ (-1 ้šๆœบ)", value=-1, minimum=-1, step=1)
283
- generate_button = gr.Button("🎨 生成海报", variant="primary", size="lg")
284
 
285
  with gr.Column(scale=1):
286
- gr.Markdown("### 2. 结果")
287
- image_output = gr.Image(label="็”Ÿๆˆ็š„ๆตทๆŠฅ", type="pil", height=600)
288
- status_output = gr.Textbox(label="็”Ÿๆˆ็Šถๆ€", lines=2, interactive=False)
289
- recapped_prompt_output = gr.Textbox(label="ๅขžๅผบๅŽ็š„ๆ็คบ่ฏ", lines=5, interactive=False, info="็”จไบŽ็”Ÿๆˆ็š„ๆœ€็ปˆๆ็คบ่ฏ")
290
 
291
  inputs_list = [
292
  prompt_input, enable_recap_checkbox, height_input, width_input,
293
  num_inference_steps_input, guidance_scale_input, seed_number_input
294
  ]
295
- outputs_list = [image_output, status_output, recapped_prompt_output]
296
 
297
  generate_button.click(fn=generate_poster, inputs=inputs_list, outputs=outputs_list)
298
-
299
- # ็คบไพ‹
300
- gr.Examples(
301
- examples=[
302
- ["ๅคๅค็ง‘ๅนป็”ตๅฝฑๆตทๆŠฅ๏ผŒ้œ“่™น่‰ฒๅฝฉๅ’Œ้ฃž่กŒๆฑฝ่ฝฆ"],
303
- ["ไผ˜้›…็š„่ฃ…้ฅฐ่‰บๆœฏ้ฃŽๆ ผ่ฑชๅŽ้…’ๅบ—ๆตทๆŠฅ"],
304
- ["็ฎ€็บฆ้ŸณไนไผšๆตทๆŠฅ๏ผŒ็ฒ—ไฝ“ๅญ—ไฝ“"],
305
- ["ๆœ‰ๆœบๅ’–ๅ•ก็š„ๅคๅคๅนฟๅ‘Š"],
306
- ],
307
- inputs=[prompt_input]
308
- )
309
 
310
  return demo
311
 
 
179
  seed_input,
180
  progress=gr.Progress(track_tqdm=True),
181
  ):
182
+ """Generate poster using preloaded models"""
183
  if not original_prompt or not original_prompt.strip():
184
+ return None, "โŒ Prompt cannot be empty!", ""
185
 
186
  try:
187
  if not HF_TOKEN:
188
+ return None, "โŒ Error: HF_TOKEN not found, please configure authentication.", ""
189
 
190
+ progress(0.1, desc="Starting generation...")
191
 
192
+ # Determine final prompt
193
  final_prompt = original_prompt
194
  if enable_recap:
195
+ progress(0.2, desc="Enhancing prompt...")
196
  final_prompt = enhance_prompt_with_qwen(original_prompt)
197
 
198
+ # Determine seed
199
  actual_seed = int(seed_input) if seed_input and seed_input != -1 else random.randint(1, 2**32 - 1)
200
 
201
+ progress(0.3, desc="Generating image...")
202
 
203
+ # Use preloaded FLUX pipeline to generate image
204
  generator = torch.Generator("cuda").manual_seed(actual_seed)
205
 
206
  with torch.inference_mode():
 
213
  height=int(height)
214
  ).images[0]
215
 
216
+ progress(1.0, desc="Complete!")
217
+ status_log = f"โœ… Generation complete! Seed: {actual_seed}"
218
  return image, status_log, final_prompt
219
 
220
  except Exception as e:
221
+ logging.error(f"Generation failed: {e}")
222
+ return None, f"โŒ Generation failed: {str(e)}", ""
223
 
224
  # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
225
+ # Gradio Interface
226
  # โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€
227
  def create_interface():
228
+ """Create Gradio interface"""
229
 
230
+ with gr.Blocks(theme=gr.themes.Soft()) as demo:
231
+ gr.Markdown("# PosterCraft-v1.0")
232
+ gr.Markdown(f"Running on: **{device if 'device' in globals() else 'cuda' if torch.cuda.is_available() else 'cpu'}** | Base Pipeline: **{DEFAULT_PIPELINE_PATH}**")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
233
 
234
  with gr.Row():
235
  with gr.Column(scale=1):
236
+ gr.Markdown("### 1. Configuration")
237
  prompt_input = gr.Textbox(
238
+ label="Prompt",
239
  lines=3,
240
+ placeholder="Enter your creative prompt...",
241
+ value="Retro sci-fi movie poster with neon colors and flying cars"
242
  )
243
  enable_recap_checkbox = gr.Checkbox(
244
+ label="Enable Prompt Recap",
245
  value=True,
246
+ info=f"Uses {DEFAULT_QWEN_MODEL_PATH} for rewriting."
247
  )
248
 
249
  with gr.Row():
250
+ width_input = gr.Slider(label="Width", minimum=256, maximum=2048, value=832, step=64)
251
+ height_input = gr.Slider(label="Height", minimum=256, maximum=2048, value=1216, step=64)
252
+ gr.Markdown("Tip: Recommended size is 832x1216 for best results.")
253
 
254
+ num_inference_steps_input = gr.Slider(label="Inference Steps", minimum=1, maximum=100, value=28, step=1)
255
+ guidance_scale_input = gr.Slider(label="Guidance Scale (CFG)", minimum=0.0, maximum=20.0, value=3.5, step=0.1)
256
+ seed_number_input = gr.Number(label="Seed", value=None, minimum=-1, step=1, info="Leave blank or set to -1 for a random seed.")
257
+ generate_button = gr.Button("Generate Image", variant="primary")
 
 
258
 
259
  with gr.Column(scale=1):
260
+ gr.Markdown("### 2. Results")
261
+ image_output = gr.Image(label="Generated Image", type="pil", show_download_button=True, height=512)
262
+ recapped_prompt_output = gr.Textbox(label="Final Prompt Used", lines=5, interactive=False)
263
+ status_output = gr.Textbox(label="Status Log", lines=4, interactive=False)
264
 
265
  inputs_list = [
266
  prompt_input, enable_recap_checkbox, height_input, width_input,
267
  num_inference_steps_input, guidance_scale_input, seed_number_input
268
  ]
269
+ outputs_list = [image_output, recapped_prompt_output, status_output]
270
 
271
  generate_button.click(fn=generate_poster, inputs=inputs_list, outputs=outputs_list)
 
 
 
 
 
 
 
 
 
 
 
272
 
273
  return demo
274