LPX55 committed
Commit 4714f6a · verified · 1 Parent(s): c49a645

Update app_v4.py

Files changed (1):
  1. app_v4.py +51 -7
app_v4.py CHANGED
@@ -173,6 +173,53 @@ def generate_image(prompt, scale, steps, control_image, controlnet_conditioning_
     ).images[0]
     # print("Type: " + str(type(image)))
     return image
+progress = gr.Progress(track_tqdm=True)
+
+def process_image(control_image, user_prompt, system_prompt, scale, steps,
+                  controlnet_conditioning_scale, guidance_scale, seed,
+                  guidance_end, temperature, top_p, max_new_tokens, log_prompt, progress):
+    # Initialize with empty caption
+    final_prompt = user_prompt.strip()
+    # If no user prompt provided, generate a caption first
+    if not final_prompt:
+        # Generate a detailed caption
+        with progress:
+            progress(0.1, "Generating caption...")
+            mcaption = model.caption(control_image, length="normal")
+            detailed_caption = mcaption["caption"]
+            final_prompt = detailed_caption
+            yield f"Using caption: {final_prompt}", None, final_prompt
+
+    # Show the final prompt being used
+    with progress:
+        progress(0.3, "Generating with prompt...")
+        yield f"Generating with: {final_prompt}", None, final_prompt
+
+    # Generate the image
+    try:
+        with progress:
+            progress(0.5, "Generating image...")
+            image = generate_image(
+                prompt=final_prompt,
+                scale=scale,
+                steps=steps,
+                control_image=control_image,
+                controlnet_conditioning_scale=controlnet_conditioning_scale,
+                guidance_scale=guidance_scale,
+                seed=seed,
+                guidance_end=guidance_end
+            )
+
+            try:
+                debug_img = Image.open(image.save("/tmp/" + str(seed) + "output.png"))
+                save_image("/tmp/" + str(seed) + "output.png", debug_img)
+            except Exception as e:
+                print("Error 160: " + str(e))
+            log_params(final_prompt, scale, steps, controlnet_conditioning_scale, guidance_scale, seed, guidance_end, control_image, image)
+            yield f"Completed! Used prompt: {final_prompt}", image, final_prompt
+    except Exception as e:
+        print("Error: " + str(e))
+        yield f"Error: {str(e)}", None, None
 
 with gr.Blocks(title="FLUX Turbo Upscaler", fill_height=True) as demo:
     gr.Markdown("⚠️ WIP SPACE - UNFINISHED & BUGGY")
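The new process_image is a generator: each yield pushes a (log, image, prompt) update to the UI while work continues. Two details diverge from stock Gradio and are worth flagging. gr.Progress is not documented as a context manager, so the with progress: blocks may fail at runtime; the usual pattern is to accept the tracker as a default parameter value and call it directly. Similarly, PIL's Image.save() returns None, so Image.open(image.save(...)) raises; the conventional order is save first, then reopen. A minimal sketch of that conventional shape, reusing model and generate_image from app_v4.py and trimming parameters for brevity:

import gradio as gr
from PIL import Image

def process_image_sketch(control_image, user_prompt, seed,
                         progress=gr.Progress(track_tqdm=True)):
    # Gradio injects the tracker via the default value; it is never listed in inputs=[...]
    final_prompt = user_prompt.strip()
    if not final_prompt:
        progress(0.1, desc="Generating caption...")
        # model.caption(...) is the captioner already used in app_v4.py
        final_prompt = model.caption(control_image, length="normal")["caption"]
        yield f"Using caption: {final_prompt}", None, final_prompt

    progress(0.5, desc="Generating image...")
    image = generate_image(prompt=final_prompt, control_image=control_image, seed=seed)

    # save() returns None, so write the debug copy first and reopen it afterwards
    debug_path = f"/tmp/{seed}output.png"
    image.save(debug_path)
    debug_img = Image.open(debug_path)

    yield f"Completed! Used prompt: {final_prompt}", image, final_prompt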
@@ -232,8 +279,8 @@ with gr.Blocks(title="FLUX Turbo Upscaler", fill_height=True) as demo:
         x_ip_token = request.headers['x-ip-token']
         client = Client("LPX55/zerogpu-experiments", hf_token=huggingface_token, headers={"x-ip-token": x_ip_token})
         cresult = client.predict(
-            n=3,
-            api_name="/predict"
+            n=3,
+            api_name="/predict"
         )
         print(f"X TOKEN: {x_ip_token}")
         print(cresult)
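For context, this call forwards the visitor's x-ip-token header to a second Space, so ZeroGPU quota is attributed to the visitor rather than to this Space's owner. A minimal sketch of the pattern, assuming huggingface_token is defined elsewhere in app_v4.py and wrapping the snippet in a hypothetical handler that receives the request:

import gradio as gr
from gradio_client import Client

def call_downstream(request: gr.Request) -> str:
    # Forward the caller's ZeroGPU identity token to the downstream Space
    x_ip_token = request.headers['x-ip-token']
    client = Client(
        "LPX55/zerogpu-experiments",
        hf_token=huggingface_token,          # assumed module-level secret
        headers={"x-ip-token": x_ip_token},  # quota accrues to the visitor
    )
    return client.predict(n=3, api_name="/predict")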
@@ -261,13 +308,12 @@ with gr.Blocks(title="FLUX Turbo Upscaler", fill_height=True) as demo:
     caption_state = gr.State()
     focus_state = gr.State()
     log_state = gr.State()
-
     generate_button.click(
         fn=process_image,
         inputs=[
             control_image, prompt, system_prompt, scale, steps,
             controlnet_conditioning_scale, guidance_scale, seed,
-            guidance_end, temperature_slider, top_p_slider, max_tokens_slider, log_prompt
+            guidance_end, temperature_slider, top_p_slider, max_tokens_slider, log_prompt, progress
         ],
         outputs=[log_state, generated_image, prompt]
     )
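Because process_image yields (status, image, prompt) triples, every yield streams a fresh value into log_state, generated_image, and prompt; the click handler does not wait for the function to return. Note that appending the module-level progress object to inputs is nonstandard: Gradio expects only components there and picks up a Progress tracker from the handler's signature, so this line is likely part of the WIP debugging. A self-contained sketch of the streaming mechanism itself:

import time
import gradio as gr

def staged(text):
    # Each yield overwrites the outputs, so the status box updates live
    yield "working...", None
    time.sleep(1)
    yield "done", text.upper()

with gr.Blocks() as sketch:
    inp = gr.Textbox(label="input")
    status = gr.Textbox(label="status")
    result = gr.Textbox(label="result")
    gr.Button("Run").click(fn=staged, inputs=[inp], outputs=[status, result])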
@@ -297,12 +343,10 @@ with gr.Blocks(title="FLUX Turbo Upscaler", fill_height=True) as demo:
         inputs=[caption_state, focus_state],
         outputs=[prompt]
     )
-
     def hello(profile: gr.OAuthProfile | None) -> str:
         if profile is None:
             return "Hello guest! There is a bug with HF ZeroGPUs that are afffecting some usage on certain spaces. Testing out some possible solutions."
         return f"You are logged in as {profile.name}. If you run into incorrect messages about ZeroGPU runtime credits being out, PLEASE give me a heads up so I can investigate further."
 
-
     demo.load(hello, inputs=None, outputs=msg1)
-    demo.queue().launch(show_error=True)
+demo.queue().launch(show_error=True)
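The hello handler relies on Gradio's OAuth injection: on a Space with hf_oauth enabled, a parameter annotated gr.OAuthProfile | None receives the logged-in profile, or None for guests, without appearing in inputs. A minimal sketch of the pattern; the gr.LoginButton is an assumption, since the diff does not show how login is surfaced:

import gradio as gr

def hello(profile: gr.OAuthProfile | None) -> str:
    if profile is None:
        return "Hello guest!"
    return f"You are logged in as {profile.name}."

with gr.Blocks() as demo:
    gr.LoginButton()  # requires hf_oauth: true in the Space's README metadata
    msg1 = gr.Markdown()
    demo.load(hello, inputs=None, outputs=msg1)

demo.queue().launch(show_error=True)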
 