#!/usr/bin/env python

from __future__ import annotations

import gradio as gr
import torch

from model import Model
from settings import ALLOW_CHANGING_BASE_MODEL, DEFAULT_MODEL_ID, SHOW_DUPLICATE_BUTTON

DESCRIPTION = "# ControlNet v1.1"

if not torch.cuda.is_available():
    DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"

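# Shared model instance; the depth tab below calls its process_depth method.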
model = Model(base_model_id=DEFAULT_MODEL_ID, task_name="Canny")

with gr.Blocks(css="style.css") as demo:
    gr.Markdown(DESCRIPTION)
    gr.Button(
        "Duplicate Space for private use",
        elem_id="duplicate-button",
        visible=SHOW_DUPLICATE_BUTTON,
    )

    # Define the inputs and outputs for the interface
    depth_inputs = [
        gr.Image(type="numpy", label="Input Image"),
        gr.Textbox(label="Prompt"),
        gr.Textbox(label="Additional Prompt"),
        gr.Textbox(label="Negative Prompt"),
        gr.Slider(label="Number of Images", minimum=1, maximum=10, step=1, value=1),
        gr.Slider(label="Image Resolution", minimum=256, maximum=1024, step=256, value=512),
        gr.Slider(label="Preprocess Resolution", minimum=128, maximum=512, step=1, value=384),
        gr.Slider(label="Number of Steps", minimum=1, maximum=100, step=1, value=20),
        gr.Slider(label="Guidance Scale", minimum=0.1, maximum=30.0, step=0.1, value=7.5),
        gr.Slider(label="Seed", minimum=0, maximum=1000000, step=1, value=0),
        gr.Radio(label="Preprocessor", choices=["Midas", "DPT", "None"], value="DPT"),
    ]

    depth_outputs = [
        gr.Gallery(label="Output Images"),
    ]

    interfaces = [
        gr.Interface(fn=model.process_depth, inputs=depth_inputs, outputs=depth_outputs, live=False),
    ]

    # Render the tabbed interface inside this Blocks layout.
    gr.TabbedInterface(interface_list=interfaces, tab_names=["Depth"]).render()

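    # Controls for inspecting and, if allowed, changing the Stable Diffusion base model.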
    with gr.Accordion(label="Base model", open=False):
        with gr.Row():
            with gr.Column(scale=5):
                current_base_model = gr.Textbox(label="Current base model")
            with gr.Column(scale=1):
                check_base_model_button = gr.Button("Check current base model")
        with gr.Row():
            with gr.Column(scale=5):
                new_base_model_id = gr.Textbox(
                    label="New base model",
                    max_lines=1,
                    placeholder="runwayml/stable-diffusion-v1-5",
                    info="The base model must be compatible with Stable Diffusion v1.5.",
                    interactive=ALLOW_CHANGING_BASE_MODEL,
                )
            with gr.Column(scale=1):
                change_base_model_button = gr.Button("Change base model", interactive=ALLOW_CHANGING_BASE_MODEL)
        if not ALLOW_CHANGING_BASE_MODEL:
            gr.Markdown(
                """The base model is not allowed to be changed in this Space so as not to slow down the demo, but it can be changed if you duplicate the Space."""
            )

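    # Show the currently loaded base model when the check button is clicked.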
    check_base_model_button.click(
        fn=lambda: model.base_model_id,
        outputs=current_base_model,
        queue=False,
        api_name="check_base_model",
    )
    # Switch the base model when a new ID is submitted or the button is clicked.
    gr.on(
        triggers=[new_base_model_id.submit, change_base_model_button.click],
        fn=model.set_base_model,
        inputs=new_base_model_id,
        outputs=current_base_model,
        api_name=False,
    )

if __name__ == "__main__":
    demo.queue(max_size=20).launch()