import spaces
import torch
import gradio as gr
from PIL import Image
from diffusers import QwenImageEditPlusPipeline

# Load the base Qwen-Image-Edit-2509 pipeline once at startup; the task-specific
# LoRA adapters are attached inside each handler below.
pipe = QwenImageEditPlusPipeline.from_pretrained(
    "Qwen/Qwen-Image-Edit-2509", torch_dtype=torch.bfloat16
).to("cuda")
@spaces.GPU
def extract_clothes(img1):
    # Attach the clothing-extraction LoRA for this request only.
    pipe.load_lora_weights(
        "JamesDigitalOcean/Qwen_Image_Edit_Extract_Clothing",
        weight_name="qwen_image_edit_remove_body.safetensors",
        adapter_name="removebody",
    )
    pil_image = Image.fromarray(img1, 'RGB')
    image = pipe(
        image=[pil_image],
        prompt=(
            "removebody remove the person from this image, but leave the outfit. "
            "the clothes should remain after deleting the person's body, skin, and hair. "
            "leave the clothes in front of a white background"
        ),
        num_inference_steps=50,
    ).images[0]
    pipe.delete_adapters("removebody")
    return image
@spaces.GPU
def tryon_clothes(img2, img3):
    # Attach the try-on LoRA for this request only.
    pipe.load_lora_weights(
        "JamesDigitalOcean/Qwen_Image_Edit_Try_On_Clothes",
        weight_name="qwen_image_edit_tryon.safetensors",
        adapter_name="tryonclothes",
    )
    pil_image2 = Image.fromarray(img2, 'RGB')
    pil_image3 = Image.fromarray(img3, 'RGB')
    image = pipe(
        image=[pil_image2, pil_image3],
        prompt="tryon_clothes dress the clothing onto the person",
        num_inference_steps=50,
    ).images[0]
    pipe.delete_adapters("tryonclothes")
    return image
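# Optional alternative (untested sketch, not part of the original app): both LoRAs
# could be loaded once at startup and toggled per request with pipe.set_adapters(),
# which avoids re-fetching the weights on every button click. Roughly:
#
#   pipe.load_lora_weights("JamesDigitalOcean/Qwen_Image_Edit_Extract_Clothing",
#                          weight_name="qwen_image_edit_remove_body.safetensors",
#                          adapter_name="removebody")
#   pipe.load_lora_weights("JamesDigitalOcean/Qwen_Image_Edit_Try_On_Clothes",
#                          weight_name="qwen_image_edit_tryon.safetensors",
#                          adapter_name="tryonclothes")
#
#   # inside extract_clothes:  pipe.set_adapters("removebody")
#   # inside tryon_clothes:    pipe.set_adapters("tryonclothes")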
with gr.Blocks() as demo:
    gr.Markdown("<div style='display:flex;justify-content:center;align-items:center;gap:.5rem;font-size:24px;'>π <strong>Qwen Image Edit Clothing Try On</strong> π</div>")
    with gr.Column():
        with gr.Row():
            img1 = gr.Image(label='Upload Clothing Image')
            img2 = gr.Image(label='Extracted Clothing')
            img3 = gr.Image(label='Upload Model Photo')
            img4 = gr.Image(label='Put The Clothing onto Model')
    with gr.Column():
        with gr.Row():
            remove_button = gr.Button('Extract Clothing')
            tryon_button = gr.Button('Put Clothing on Model')

    gr.on(
        triggers=[remove_button.click],
        fn=extract_clothes,
        inputs=[img1],
        outputs=[img2],
    )
    gr.on(
        triggers=[tryon_button.click],
        fn=tryon_clothes,
        inputs=[img2, img3],
        outputs=[img4],
    )
    # show_case = gr.Examples(
    #     examples=[
    #         ["assets/clothes/img3.jpeg", "assets/example_outputs/img1-clothing.webp", "assets/clothingmodels/img1.jpeg", "assets/example_outputs/image-1-clothingmodel.webp"],
    #         ["assets/clothes/img6.jpeg", "assets/example_outputs/image2-clothing.webp", "assets/clothingmodels/img15.jpeg", "assets/example_outputs/image2-clothingmodel.webp"],
    #     ],
    #     inputs=[img1, img2, img3, img4],
    #     label='Examples'
    # )
if __name__ == "__main__":
    demo.launch(share=True)