Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16).to(device)
 
 MAX_SEED = np.iinfo(np.int32).max
-MAX_IMAGE_SIZE =
+MAX_IMAGE_SIZE = 2560
 
 @spaces.GPU(duration=190)
 def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
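For context, a minimal sketch of how the lines in this hunk typically fit together in a FLUX.1-dev ZeroGPU Space, assuming the standard diffusers/Gradio template. The body of infer and the slider usage shown at the end are illustrative assumptions, not part of this commit; only the constants, the decorator, and the function signature come from the diff above.

import random

import gradio as gr
import numpy as np
import spaces
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the FLUX.1-dev pipeline in bfloat16 and move it to the GPU when available.
pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
).to(device)

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 2560  # new upper bound for the width/height sliders (this commit)

@spaces.GPU(duration=190)  # request a ZeroGPU slot for up to 190 seconds per call
def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024,
          guidance_scale=5.0, num_inference_steps=28,
          progress=gr.Progress(track_tqdm=True)):
    # Illustrative body (assumption): pick a seed, run the pipeline, return the image.
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    generator = torch.Generator(device=device).manual_seed(seed)
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        generator=generator,
    ).images[0]
    return image, seed

# In the UI, MAX_IMAGE_SIZE would typically cap the resolution sliders, e.g.:
# gr.Slider(label="Width", minimum=256, maximum=MAX_IMAGE_SIZE, step=32, value=1024)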