daniel-dona committed
Commit ead58e3 (verified) · 1 parent: 7ff775d

Update app.py

Files changed (1)
  1. app.py +19 -16
app.py CHANGED
@@ -62,20 +62,20 @@ def sanitize_seed(seed):
         if isinstance(seed, str):
             seed = seed.strip()
             if seed == "":
-                return 0
+                return -1
             seed_int = int(seed, 10)
         elif isinstance(seed, (int, np.integer)):
             seed_int = int(seed)
         elif isinstance(seed, float) and seed.is_integer():
             seed_int = int(seed)
         else:
-            return 0
+            return -1
     except (ValueError, TypeError):
-        return 0
+        return -1
 
     if 0 <= seed_int <= MAX_SEED:
         return seed_int
-    return 0
+    return -1
 
 def polish_prompt_en(original_prompt):
 
@@ -110,8 +110,9 @@ def infer(
     progress=gr.Progress(track_tqdm=True),
 ):
 
-
-    if randomize_seed:
+    seed = sanitize_seed(seed)
+
+    if seed == -1:
         seed = random.randint(0, MAX_SEED)
 
     width, height = aspect_ratios[aspect_ratio]
@@ -125,7 +126,7 @@ def infer(
         height=height,
         num_inference_steps=num_inference_steps,
         true_cfg_scale=guidance_scale,
-        generator=torch.Generator(device="cuda").manual_seed(sanitize_seed(seed))
+        generator=torch.Generator(device="cuda").manual_seed(seed)
     ).images[0]
 
     return image, seed
@@ -145,6 +146,9 @@ with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         with gr.Row():
             gr.Markdown("HINT: Use smaller image size for testing, will consume less of your free GPU time!")
+
+        with gr.Row():
+            gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, examples_per_page=25, cache_examples=False, cache_mode="lazy")
 
         with gr.Row():
             prompt = gr.Text(
@@ -152,14 +156,15 @@ with gr.Blocks(css=css) as demo:
                 show_label=False,
                 placeholder="Enter your prompt",
                 container=False,
-
             )
-            run_button = gr.Button("Run", scale=0, variant="primary")
+
+        with gr.Row():
+            run_button = gr.Button("Generate", scale=0, variant="primary")
 
         result = gr.Image(label="Result", show_label=False)
         seed_output = gr.Textbox(label="Used seed", lines=1)
 
-        with gr.Accordion("Advanced Settings", open=True):
+        with gr.Accordion("Advanced Settings", open=False):
             negative_prompt = gr.Text(
                 label="Negative prompt",
                 max_lines=1,
@@ -170,11 +175,9 @@ with gr.Blocks(css=css) as demo:
             seed = gr.Textbox(
                 lines=1,
                 label="Manual seed",
-                info="Manual seed, if not a valid 32 bit integer, will be 0."
+                info="Manual seed, otherwise random."
             )
 
-            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
-
            with gr.Row():
                 aspect_ratio = gr.Dropdown(
                     label="Image size (aprox.)",
@@ -197,10 +200,11 @@ with gr.Blocks(css=css) as demo:
                     minimum=1,
                     maximum=50,
                     step=1,
-                    value=35,
+                    value=25,
                 )
 
-        gr.Examples(examples=examples, inputs=[prompt], outputs=[result, seed], fn=infer, cache_examples=False, cache_mode="lazy")
+
+
         gr.on(
             triggers=[run_button.click, prompt.submit],
             fn=infer,
@@ -208,7 +212,6 @@ with gr.Blocks(css=css) as demo:
             prompt,
             negative_prompt,
             seed,
-            randomize_seed,
             aspect_ratio,
             guidance_scale,
             num_inference_steps,
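
For readers skimming the diff: the commit replaces the `randomize_seed` checkbox with a sentinel value. `sanitize_seed` now returns -1 for anything that is not a valid seed, and `infer` draws a random seed whenever it sees -1. Below is a minimal standalone sketch of that flow; the `try:` wrapper, the numpy import, and the `MAX_SEED` definition sit outside the hunks above, so the versions shown here are assumptions.

```python
# Sketch of the seed flow after this commit (not part of the diff itself).
# Assumptions: MAX_SEED and the numpy import come from elsewhere in app.py,
# and the try/except wrapper around the type checks sits just above the hunk.
import random
import numpy as np

MAX_SEED = np.iinfo(np.int32).max  # assumed definition


def sanitize_seed(seed):
    """Return a seed in [0, MAX_SEED], or -1 to request a random seed."""
    try:
        if isinstance(seed, str):
            seed = seed.strip()
            if seed == "":
                return -1
            seed_int = int(seed, 10)
        elif isinstance(seed, (int, np.integer)):
            seed_int = int(seed)
        elif isinstance(seed, float) and seed.is_integer():
            seed_int = int(seed)
        else:
            return -1
    except (ValueError, TypeError):
        return -1

    if 0 <= seed_int <= MAX_SEED:
        return seed_int
    return -1


# In infer(), the -1 sentinel replaces the old randomize_seed checkbox:
seed = sanitize_seed("")        # empty "Manual seed" textbox -> -1
if seed == -1:
    seed = random.randint(0, MAX_SEED)
print(seed)                     # the seed actually used, also returned to the UI
```

Sanitizing in `infer()` instead of inside the `torch.Generator` call means the value returned to the "Used seed" textbox is the seed that was actually used, including a randomly drawn one.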