Commit 2b0be71
Parent: c1f6c62
Improve mem usage
app.py CHANGED

```diff
@@ -31,11 +31,13 @@ main_pipe = StableDiffusionControlNetPipeline.from_pretrained(
     safety_checker=None,
     torch_dtype=torch.float16,
 ).to("cuda")
+
 #main_pipe.unet = torch.compile(main_pipe.unet, mode="reduce-overhead", fullgraph=True)
 #main_pipe.unet.to(memory_format=torch.channels_last)
 #main_pipe.unet = torch.compile(main_pipe.unet, mode="reduce-overhead", fullgraph=True)
 #model_id = "stabilityai/sd-x2-latent-upscaler"
-image_pipe = StableDiffusionControlNetImg2ImgPipeline
+image_pipe = StableDiffusionControlNetImg2ImgPipeline(**main_pipe.components)
+
 #image_pipe.unet = torch.compile(image_pipe.unet, mode="reduce-overhead", fullgraph=True)
 #upscaler = StableDiffusionLatentUpscalePipeline.from_pretrained(model_id, torch_dtype=torch.float16)
 #upscaler.to("cuda")
@@ -112,7 +114,7 @@ def inference(
     control_image_small = center_crop_resize(control_image)
     main_pipe.scheduler = SAMPLER_MAP[sampler](main_pipe.scheduler.config)
     my_seed = random.randint(0, 2**32 - 1) if seed == -1 else seed
-    generator = torch.manual_seed(
+    generator = torch.Generator(device="cuda").manual_seed(seed)
 
     out = main_pipe(
         prompt=prompt,
```