import gradio as gr
from PIL import Image

# Earlier scratch experiments (kept as notes):
#   gr.Interface.load(name="spaces/CVPR/lama-example")   # load the LAMA Space directly
#   gr.Interface.load("spaces/multimodalart/latentdiffusion")(ent[0], '50', '256', '256', '1', 10)[0]
def get_lama(img1, img2):
    """Inpaint `img1` using the mask drawn in `img2` via the hosted LAMA Space."""
    print("******** Inside LAMA ********")
    # The CVPR/lama-example Space takes (image, mask, masking option) and returns
    # the inpainted image as a file path plus the mask it used.
    # "automatic (U2net)" is the alternative masking option.
    img, mask = gr.Interface.load(name="spaces/CVPR/lama-example")(img1, img2, "manual")
    # Re-open the returned file with PIL and keep a local copy.
    im = Image.open(img)
    im.save("./transformed_image.jpg")
    return im, mask
# Alternative UI (unused): a plain gr.Interface with an image upload, a canvas mask
# (optionally inverted), a masking-option radio, and file/PIL image outputs.
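# A possible refinement (not part of the original script): calling gr.Interface.load
# inside the click handler re-fetches the Space on every request. The sketch below
# loads it once at module level and reuses the callable; `lama_space` and
# `get_lama_cached` are illustrative names, assuming the Space keeps the
# (image, mask, masking option) signature used above.
lama_space = gr.Interface.load(name="spaces/CVPR/lama-example")

def get_lama_cached(img1, img2):
    # Same contract as get_lama above, without re-loading the Space on each call.
    img, mask = lama_space(img1, img2, "manual")
    im = Image.open(img)
    im.save("./transformed_image.jpg")
    return im, mask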
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Testing</center></h1>")
    gr.Markdown("""Testing Inference for Gradio. Work in Progress.""")
    with gr.Row():
        # Inputs: the original image and a canvas for drawing the inpainting mask.
        in_image = gr.Image(type='filepath', label="Original Image")
        in_image_mask2 = gr.Image(type='filepath', source="canvas", label="Mask")
        # Outputs: the inpainted image and the mask that was applied.
        out_image = gr.Image(type='pil', label="Output")
        out_image2 = gr.Image(type='pil', label="Mask")
    b1 = gr.Button("Image Button")
    b1.click(get_lama, inputs=[in_image, in_image_mask2], outputs=[out_image, out_image2])

demo.launch(enable_queue=True, debug=True)