Create app.py
app.py CHANGED
@@ -1,92 +1,108 @@
Removed in this change (excerpt of the previous version):

# argument parsing previously lived in a helper function:
    args = parser.parse_args()
    return args

# add_message() took a MultimodalTextbox payload instead of separate mic/text inputs:
def add_message(history, message):
    for x in message["files"]:
        history.append({"role": "user", "content": {"path": x}})
    if message["text"] is not None:
        history.append({"role": "user", "content": message["text"]})
    return history, gr.MultimodalTextbox(value=None, interactive=False)

# a separate helper reset the user input field:
    """Reset the user input field."""
    return gr.Textbox.update(value='')

# reset_state() previously took the history as an argument:
def reset_state(task_history):
    """Reset the chat history."""
    return []

# predict() streamed the reply character by character:
    chatbot[-1]["content"] += character
    time.sleep(0.05)
    yield chatbot

# the UI used a single MultimodalTextbox instead of separate Audio/Textbox widgets:
    user_input = gr.MultimodalTextbox(
        interactive=True,
        placeholder="Enter message or upload file...",
        show_label=False,
        file_types=["audio"],
        # sources=["microphone", "upload"],
    )
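The fragments above suggest the removed predict() streamed its reply one character at a time rather than returning it in one piece. A minimal sketch of that pattern, reconstructed around the captured lines (the empty assistant turn and the loop over response are assumptions, not recovered code; only the three lines marked "captured" come from the diff):

import time

def predict(chatbot):
    """Old streaming variant (reconstructed sketch): emit the reply one character at a time."""
    response = "**That's cool!**"                          # captured fallback reply
    chatbot.append({"role": "assistant", "content": ""})   # assumed: open an empty assistant turn
    for character in response:                             # assumed loop; `character` appears in the captured lines
        chatbot[-1]["content"] += character                # captured
        time.sleep(0.05)                                   # captured
        yield chatbot                                       # captured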
The new version of app.py:

import gradio as gr
from argparse import ArgumentParser
import time
from pathlib import Path


def add_message(history, mic, text):
    """Append the recorded audio and/or text as user messages."""
    if not mic and not text:
        return history, "Input is empty"
    if mic and Path(mic).exists():
        history.append({"role": "user", "content": {"path": mic}})
    if text:
        history.append({"role": "user", "content": text})
    print(f"{history=}")
    return history, None


def reset_state():
    """Reset the chat history."""
    return []


def regenerate(task_history):
    """Regenerate the last bot response."""
    # Drop trailing assistant messages, then answer the last user turn again.
    while task_history and task_history[-1]["role"] == "assistant":
        print(f"discard {task_history[-1]}")
        task_history.pop()
    if task_history:
        print(f"{task_history=}")
        return predict(task_history)
    return task_history


def predict(chatbot):
    """Generate a response from the model."""
    if len(chatbot) > 0:
        # Find the most recent user message and echo it back.
        for item in reversed(chatbot):
            if item["role"] == "user":
                break
        if isinstance(item["content"], dict):
            # Audio input: return the uploaded file as the assistant reply.
            chatbot.append({"role": "assistant", "content": item["content"]})
        elif isinstance(item["content"], tuple):
            chatbot.append({"role": "assistant", "content": {"path": item["content"][0]}})
        else:
            chatbot.append({"role": "assistant", "content": f"echo {item['content']}"})
    else:
        response = "**That's cool!**"
        chatbot.append({"role": "assistant", "content": response})
    return chatbot


def _launch_demo(args):
    with gr.Blocks() as demo:
        gr.Markdown("""<center><font size=8>ChatBot Dummy</center>""")
        chatbot = gr.Chatbot(
            elem_id="chatbot", bubble_full_width=False, type="messages"
        )
        # task_history = gr.State([])
        mic = gr.Audio(type="filepath")
        text = gr.Textbox(placeholder="Enter message ...")

        with gr.Row():
            clean_btn = gr.Button("🧹 Clean History (清除历史)")
            regen_btn = gr.Button("🤔️ Regenerate (重试)")
            submit_btn = gr.Button("🚀 Submit")

        def on_submit(chatbot, mic, text):
            history, error = add_message(chatbot, mic, text)
            if error:
                gr.Warning(error)  # show a warning message
                return history, None, None
            else:
                return predict(history), None, None

        submit_btn.click(
            fn=on_submit,
            inputs=[chatbot, mic, text],
            outputs=[chatbot, mic, text],
        )
        clean_btn.click(reset_state, outputs=[chatbot], show_progress=True)
        regen_btn.click(regenerate, [chatbot], [chatbot], show_progress=True)

    demo.queue().launch(
        share=False,
        inbrowser=args.inbrowser,
        server_port=args.server_port,
        server_name=args.server_name,
        max_threads=4,
    )


if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument(
        "--inbrowser",
        action="store_true",
        default=False,
        help="Automatically launch the interface in a new tab on the default browser.",
    )
    parser.add_argument(
        "--server-port", type=int, default=7860, help="Demo server port."
    )
    parser.add_argument(
        "--server-name", type=str, default="0.0.0.0", help="Demo server name."
    )

    args = parser.parse_args()

    _launch_demo(args)
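Since add_message() and predict() are plain functions, they can be exercised without launching the UI. A minimal sketch, assuming the file is saved as app.py (as the commit title suggests) and gradio is installed:

from app import add_message, predict

history = []
history, error = add_message(history, mic=None, text="hello")
assert error is None
history = predict(history)
print(history[-1])  # expected: {'role': 'assistant', 'content': 'echo hello'}

The demo itself is started with python app.py, optionally passing the --inbrowser, --server-port, and --server-name flags defined above.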