Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -46,6 +46,37 @@ aspect_ratios = {
}


+
+def sanitize_seed(seed):
+    """
+    Validate and clamp a seed to int32 max. Returns 0 if invalid.
+
+    Rules:
+    - Accept int-like values (ints, numeric strings).
+    - Must be an integer >= 0 and <= MAX_SEED.
+    - Otherwise return 0.
+    """
+    # Try to coerce from strings/floats that represent integers
+    try:
+        # Handle strings or floats that are integer-valued
+        if isinstance(seed, str):
+            seed = seed.strip()
+            if seed == "":
+                return 0
+            seed_int = int(seed, 10)
+        elif isinstance(seed, (int, np.integer)):
+            seed_int = int(seed)
+        elif isinstance(seed, float) and seed.is_integer():
+            seed_int = int(seed)
+        else:
+            return 0
+    except (ValueError, TypeError):
+        return 0
+
+    if 0 <= seed_int <= MAX_SEED:
+        return seed_int
+    return 0
+
def polish_prompt_en(original_prompt):

    SYSTEM_PROMPT = open("improve_prompt.txt").read()
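The new sanitize_seed helper is pure Python, so its docstring rules are easy to check outside the Space. Below is a hypothetical sanity check, not part of the commit; it assumes sanitize_seed and MAX_SEED can be imported from app.py and that MAX_SEED is np.iinfo(np.int32).max (2**31 - 1), as in the usual ZeroGPU templates.

# Hypothetical sanity check for sanitize_seed (not part of the Space).
# Assumes sanitize_seed and MAX_SEED are importable from app.py; note that
# importing app.py also runs the model setup, so for a real test one would
# likely move the helper into its own module first.
import numpy as np

from app import MAX_SEED, sanitize_seed

assert sanitize_seed("42") == 42            # numeric string is accepted
assert sanitize_seed("  7  ") == 7          # surrounding whitespace is stripped
assert sanitize_seed("") == 0               # empty string falls back to 0
assert sanitize_seed(3.0) == 3              # integer-valued float is accepted
assert sanitize_seed(3.5) == 0              # non-integer float falls back to 0
assert sanitize_seed(-1) == 0               # negative values fall back to 0
assert sanitize_seed(MAX_SEED + 1) == 0     # above int32 max falls back to 0
assert sanitize_seed(np.int64(123)) == 123  # numpy integers are accepted
assert sanitize_seed("abc") == 0            # non-numeric strings fall back to 0
print("sanitize_seed behaves as documented")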
@@ -85,7 +116,8 @@ def infer(

    width, height = aspect_ratios[aspect_ratio]

-    print("Generating for prompt:
+    print(f"Generating for prompt: \n\t{prompt}\n\t{seed}\n\t{aspect_ratio}\n\t{num_inference_steps}")
+
    image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
@@ -93,11 +125,9 @@ def infer(
        height=height,
        num_inference_steps=num_inference_steps,
        true_cfg_scale=guidance_scale,
-        generator=torch.Generator(device="cuda").manual_seed(seed)
+        generator=torch.Generator(device="cuda").manual_seed(sanitize_seed(seed))
    ).images[0]

-    #image.save("example.png")
-
    return image, seed


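For context on the generator change: passing an explicit torch.Generator seeded with the sanitized value is what makes a run reproducible. The sketch below is a standalone illustration with plain tensors rather than the Space's pipeline, and it uses device="cpu" so it runs anywhere (app.py uses "cuda").

# Minimal sketch: a fixed integer seed yields identical starting noise.
import torch

def noise_for_seed(seed: int) -> torch.Tensor:
    generator = torch.Generator(device="cpu").manual_seed(seed)
    # Latent-like noise of the kind a diffusion call would start from.
    return torch.randn(1, 4, 8, 8, generator=generator)

a = noise_for_seed(123)
b = noise_for_seed(123)
c = noise_for_seed(124)

print(torch.equal(a, b))  # True: same seed -> same starting noise
print(torch.equal(a, c))  # False: different seed -> different noise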
@@ -106,16 +136,16 @@ def infer(
css = """
#col-container {
    margin: 0 auto;
-    max-width:
+    max-width: 1920px;
}
"""


with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
-
-
-
+        with gr.Row():
+            gr.Markdown("HINT: Use smaller image size for testing, will consume less of your free GPU time!")
+
        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
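The CSS change itself is standard Gradio usage: a stylesheet string passed to gr.Blocks and targeted at a component via elem_id. A minimal, self-contained sketch of just that pattern (not the full app.py UI):

# Illustrative only: custom css applied to a column through its elem_id.
import gradio as gr

css = """
#col-container {
    margin: 0 auto;
    max-width: 1920px;
}
"""

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown("HINT: Use smaller image size for testing, will consume less of your free GPU time!")

if __name__ == "__main__":
    demo.launch()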
@@ -137,12 +167,10 @@ with gr.Blocks(css=css) as demo:
                visible=True,
            )

-            seed = gr.
-
-
-
-                step=1,
-                value=0,
+            seed = gr.Textbox(
+                lines=1,
+                label="Manual seed",
+                info="Manual seed, if not a valid 32 bit integer, will be 0.",
            )

            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
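The diff does not show how the free-text seed and the existing "Randomize seed" checkbox are combined before infer() runs, so the helper below is purely hypothetical glue (resolve_seed is an invented name). It assumes sanitize_seed from the first hunk and MAX_SEED (int32 max) are in scope.

# Hypothetical glue (not in the diff): pick the effective seed for infer().
import random

def resolve_seed(seed_text: str, randomize: bool) -> int:
    """Return a random seed when the checkbox is set, else the sanitized input."""
    if randomize:
        return random.randint(0, MAX_SEED)
    return sanitize_seed(seed_text)  # falls back to 0 on invalid input

# e.g. resolve_seed("12345", randomize=False) -> 12345
#      resolve_seed("not a number", randomize=False) -> 0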