Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -53,7 +53,7 @@ pipe = EllaXLPipeline(pipe,f'{pipeline_path}/pytorch_model.bin')
|
|
53 |
# print(f"Optimizing finished successfully after {time.time()-t} secs")
|
54 |
|
55 |
@spaces.GPU(enable_queue=True)
|
56 |
- def infer(prompt,negative_prompt,seed,resolution):
|
57 |
print(f"""
|
58 |
—/n
|
59 |
{prompt}
|
@@ -73,7 +73,7 @@ def infer(prompt,negative_prompt,seed,resolution):
|
|
73 |
|
74 |
w,h = resolution.split()
|
75 |
w,h = int(w),int(h)
|
76 |
- image = pipe(prompt,num_inference_steps=
|
77 |
print(f'gen time is {time.time()-t} secs')
|
78 |
|
79 |
# Future
|
@@ -104,6 +104,7 @@ with gr.Blocks(css=css) as demo:
|
|
104 |
prompt_in = gr.Textbox(label="Prompt", value="A smiling man with wavy brown hair and a trimmed beard")
|
105 |
resolution = gr.Dropdown(value=resolutions[0], show_label=True, label="Resolution", choices=resolutions)
|
106 |
seed = gr.Textbox(label="Seed", value=-1)
|
|
|
107 |
negative_prompt = gr.Textbox(label="Negative Prompt", value=default_negative_prompt)
|
108 |
submit_btn = gr.Button("Generate")
|
109 |
result = gr.Image(label="BRIA-2.4 Result")
|
@@ -131,7 +132,8 @@ with gr.Blocks(css=css) as demo:
|
|
131 |
prompt_in,
|
132 |
negative_prompt,
|
133 |
seed,
|
134 |
- resolution
|
|
|
135 |
],
|
136 |
outputs = [
|
137 |
result
|
|
|
53 |
# print(f"Optimizing finished successfully after {time.time()-t} secs")
|
54 |
|
55 |
@spaces.GPU(enable_queue=True)
|
56 |
+ def infer(prompt,negative_prompt,seed,resolution, steps):
|
57 |
print(f"""
|
58 |
—/n
|
59 |
{prompt}
|
|
|
73 |
|
74 |
w,h = resolution.split()
|
75 |
w,h = int(w),int(h)
|
76 |
+ image = pipe(prompt,num_inference_steps=steps, negative_prompt=negative_prompt,generator=generator,width=w,height=h).images[0]
|
77 |
print(f'gen time is {time.time()-t} secs')
|
78 |
|
79 |
# Future
|
|
|
104 |
prompt_in = gr.Textbox(label="Prompt", value="A smiling man with wavy brown hair and a trimmed beard")
|
105 |
resolution = gr.Dropdown(value=resolutions[0], show_label=True, label="Resolution", choices=resolutions)
|
106 |
seed = gr.Textbox(label="Seed", value=-1)
|
107 |
+ steps = gr.Textbox(label="Steps", value=50)
|
108 |
negative_prompt = gr.Textbox(label="Negative Prompt", value=default_negative_prompt)
|
109 |
submit_btn = gr.Button("Generate")
|
110 |
result = gr.Image(label="BRIA-2.4 Result")
|
|
|
132 |
prompt_in,
|
133 |
negative_prompt,
|
134 |
seed,
|
135 |
+ resolution,
|
136 |
+ steps
|
137 |
],
|
138 |
outputs = [
|
139 |
result
|