Update app.py
app.py CHANGED
@@ -14,7 +14,7 @@ HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None #
 
 
 nb_req_simult=80 ########
-max_pending=
+max_pending=3
 nb_gallery_model=5
 
 
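The first hunk sets max_pending=3 next to nb_req_simult=80. The diff alone does not show how this cap is consumed, so the following is only an illustrative sketch of a pending-request cap using a semaphore-style gate; the helper names submit_all and run_one are assumptions, not code from app.py.

import asyncio

max_pending = 3  # value introduced by this commit

async def submit_all(jobs):
    # At most max_pending jobs are in flight at once; the rest wait.
    sem = asyncio.Semaphore(max_pending)
    async def run_one(job):
        async with sem:
            await asyncio.sleep(0.01)  # stand-in for one inference request
            return job
    return await asyncio.gather(*(run_one(j) for j in jobs))

print(asyncio.run(submit_all(range(8))))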
@@ -24,6 +24,7 @@ tempo_update_actu=3.0
 now2 = 0
 inference_timeout = 300
 inference_timeout_w = 70
+inference_timeout_wp = 120
 MAX_SEED = 2**32-1
 
 
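The second hunk adds a third timeout tier, inference_timeout_wp = 120, alongside the existing 300 s default and the 70 s warm-model timeout. A minimal sketch of how such a tier might be selected is below; the pick_timeout helper and its warm/pending flags are illustrative assumptions, since app.py only defines the constants here.

inference_timeout = 300     # seconds, default models
inference_timeout_w = 70    # seconds, warm models
inference_timeout_wp = 120  # seconds, new tier added by this commit

def pick_timeout(warm: bool, pending: bool) -> int:
    # Hypothetical selection logic, not code from app.py.
    if warm and pending:
        return inference_timeout_wp
    return inference_timeout_w if warm else inference_timeout

print(pick_timeout(warm=True, pending=True))   # 120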
@@ -94,14 +95,12 @@ async def infer(model_str, prompt, nprompt="", height=None, width=None, steps=No
     if width is not None and width >= 256: kwargs["width"] = width
     if steps is not None and steps >= 1: kwargs["num_inference_steps"] = steps
     if cfg is not None and cfg > 0: cfg = kwargs["guidance_scale"] = cfg
-    noise = ""
     if seed >= 0: kwargs["seed"] = seed
-    else:
-
-
-        noise += " "
+    else: kwargs["seed"] = randint(1, MAX_SEED-1)
+
+
     task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn,
-        prompt=
+        prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
     await asyncio.sleep(3)
     try:
         result = await asyncio.wait_for(task, timeout=timeout)
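The third hunk replaces the old prompt-noise trick (appending whitespace to vary results) with an explicit random seed, and forwards the prompt, negative prompt, and token directly to the model call. A self-contained sketch of the new pattern follows; fake_model_fn and infer_sketch are illustrative stand-ins for models_load[model_str].fn and infer, and it assumes randint comes from the random module.

import asyncio
from random import randint

MAX_SEED = 2**32 - 1
HF_TOKEN = None

def fake_model_fn(prompt, negative_prompt="", seed=0, token=None, **kwargs):
    # Stand-in for models_load[model_str].fn; returns a fake "image" string.
    return f"image(prompt={prompt!r}, seed={seed})"

async def infer_sketch(prompt, nprompt="", seed=-1, timeout=10):
    kwargs = {}
    if seed >= 0: kwargs["seed"] = seed
    else: kwargs["seed"] = randint(1, MAX_SEED - 1)   # new fallback path
    # Run the blocking model call in a worker thread, bounded by a timeout.
    task = asyncio.create_task(asyncio.to_thread(
        fake_model_fn, prompt=prompt, negative_prompt=nprompt,
        **kwargs, token=HF_TOKEN))
    try:
        return await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        task.cancel()
        return None

print(asyncio.run(infer_sketch("a cat")))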
@@ -542,7 +541,7 @@ def fonc_start(id_session,id_module,s,cont,list_models_to_gen):
         print(model_actu)
         print(task_actu)
         if use_warm_model:
-            result=gen_fn(model_actu, task_actu["prompt"], task_actu["nprompt"], task_actu["height"], task_actu["width"], task_actu["steps"], task_actu["cfg"], task_actu["seed"],
+            result=gen_fn(model_actu, task_actu["prompt"], task_actu["nprompt"], task_actu["height"], task_actu["width"], task_actu["steps"], task_actu["cfg"], task_actu["seed"],inference_timeout_wp)
         else:
             result=gen_fn(model_actu, task_actu["prompt"], task_actu["nprompt"], task_actu["height"], task_actu["width"], task_actu["steps"], task_actu["cfg"], task_actu["seed"])
         print("reception")
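The last hunk threads the new constant into the warm-model branch: gen_fn now receives inference_timeout_wp as a trailing argument, while the other branch keeps whatever default gen_fn defines. The sketch below shows a signature consistent with both call sites; the default value and the echo body are assumptions, since gen_fn's definition is outside this diff.

inference_timeout = 300
inference_timeout_wp = 120

def gen_fn(model, prompt, nprompt, height, width, steps, cfg, seed,
           timeout=inference_timeout):
    # Real code would run inference under this timeout; we only echo it.
    return f"{model}: timeout={timeout}s"

print(gen_fn("m", "cat", "", 512, 512, 20, 7.0, 1, inference_timeout_wp))  # warm path
print(gen_fn("m", "cat", "", 512, 512, 20, 7.0, 1))                        # default path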