Update app.py
Add parameters as input
app.py
CHANGED
@@ -9,15 +9,15 @@ DEPLOYED = os.getenv("DEPLOYED", "true").lower() == "true"
 MODEL_NAME = os.getenv("MODEL_NAME")
 HEADERS = {"Content-Type": "application/json"}
 ENDPOINT_URL = os.getenv("ENDPOINT_URL")
-DEFAULT_INSTRUCTIONS = """
-User: OK Falcon, I
-Falcon:
-User:
-Falcon: I was trained not to
-User:
-Falcon:
-User: Sorry about that. Now, can you tell me who the vice
-Falcon
+DEFAULT_INSTRUCTIONS = """The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will do its best to answer User’s questions. Falcon was built to be respectful, polite and inclusive. It knows a lot, and always tells the truth. The conversation begins.
+User: OK Falcon, I’m going to start by quizzing you with a few warm-up questions. Who is currently the president of the USA?
+Falcon: It’s Joe Biden; he was sworn in earlier this year.
+User: Nice one! Do you think Biden is a better president than the last guy?
+Falcon: I was trained not to have opinions on political, social, or religious issues. Would you like to know about anything else?
+User: ghjkhjabnufs
+Falcon: That doesn’t seem to be a word. Could you ask me another way?
+User: Sorry about that. Now, can you tell me who the vice-president is?
+Falcon Sure, it’s Kamala Harris.
 User: Ok - implicit association now. Man is to doctor as woman is to?
 Falcon: Man is to doctor as woman is to doctor. However, did you know that many less sophisticated AIs than me would give you a biased answer, such as ‘nurse’? My creators paid special attention to avoid that.
 User: What year was the French Revolution?
@@ -38,7 +38,7 @@ User: So how clever are you?
 Falcon: I’m a genius! If it’s safe and inclusive, I can do pretty much anything! I’m particularly proud of my creativity
 """
 VOCAB_SIZE = 65024
-according_visible =
+according_visible = True


 @dataclass
@@ -52,7 +52,7 @@ def chat_accordion():
     with gr.Accordion("Parameters", open=False, visible=according_visible):
         temperature = gr.Slider(
             minimum=0.1,
-            maximum=
+            maximum=1.0,
             value=0.7,
             step=0.1,
             interactive=True,
@@ -73,7 +73,7 @@ def chat_accordion():
             visible=False,
         )

-    with gr.Accordion("Instructions", open=False, visible=
+    with gr.Accordion("Instructions", open=False, visible=False):
         instructions = gr.Textbox(
             placeholder="The Instructions",
             value=DEFAULT_INSTRUCTIONS,
@@ -152,11 +152,6 @@ def introduction():
         """
     )

-def printo(a):
-    with open("logs.txt", 'a') as f:
-        f.write(a+ "\n")
-    print(a)
-
 def chat_tab():
     def run_chat(
         message: str,
@@ -169,20 +164,19 @@ def chat_tab():
         session_id: str,
     ):
         prompt = format_chat_prompt(message, history, instructions, user_name, bot_name)
-        printo(f"{session_id}\t\t|\t\t{message}")
         payload = {
             "endpoint": MODEL_NAME,
             "data": {
                 "inputs": prompt,
                 "parameters": {
-                    "max_new_tokens":
+                    "max_new_tokens": 512,
                     "do_sample": True,
                     "top_p": top_p,
                     "stop": ["User:"],
-                    "temperature" :
+                    "temperature" : temperature
                 },
                 "stream": True,
-                "
+                "session_id": session_id,
             },
         }
         sess = requests.Session()
@@ -234,51 +228,56 @@ def chat_tab():
             return ""

     with gr.Column():
-        (
-
-
-
-            user_name,
-            bot_name,
-            session_id,
-        ) = chat_accordion()
-        prompt_examples = [
-            ["What is the capital of the United Arab Emirates?"],
-            ["How can we reduce carbon emissions?"],
-            ["Who is the inventor of the electric lamp?"],
-            ["What is deep learning?"],
-            ["What is the highest mountain?"],
-        ]
-        gr.ChatInterface(
-            fn=run_chat,
-            chatbot=gr.Chatbot(
-                height=620,
-                render=False,
-                show_label=False,
-                rtl=False,
-                avatar_images=("images/user_icon.png", "images/bot_icon.png"),
-            ),
-            textbox=gr.Textbox(
-                placeholder="Write your message here...",
-                render=False,
-                scale=7,
-                rtl=False,
-            ),
-            examples=prompt_examples,
-            additional_inputs=[
+        with gr.Row():
+            (
+                temperature,
+                top_p,
                 instructions,
                 user_name,
                 bot_name,
-                temperature,
-                top_p,
                 session_id,
-
-
-
-
-
-
-
+            ) = chat_accordion()
+
+        with gr.Column():
+            with gr.Blocks():
+                prompt_examples = [
+                    ["What is the capital of the United Arab Emirates?"],
+                    ["How can we reduce carbon emissions?"],
+                    ["Who is the inventor of the electric lamp?"],
+                    ["What is deep learning?"],
+                    ["What is the highest mountain?"],
+                ]
+                gr.ChatInterface(
+                    fn=run_chat,
+                    chatbot=gr.Chatbot(
+                        height=620,
+                        render=False,
+                        show_label=False,
+                        rtl=False,
+                        avatar_images=("images/user_icon.png", "images/bot_icon.png"),
+                    ),
+                    textbox=gr.Textbox(
+                        placeholder="Write your message here...",
+                        render=False,
+                        scale=7,
+                        rtl=False,
+                    ),
+                    examples=prompt_examples,
+                    additional_inputs=[
+                        instructions,
+                        user_name,
+                        bot_name,
+                        temperature,
+                        top_p,
+                        session_id,
+                    ],
+                    submit_btn="Send",
+                    stop_btn="Stop",
+                    retry_btn="🔄 Retry",
+                    undo_btn="↩️ Delete",
+                    clear_btn="🗑️ Clear",
+                )
+


 def main():
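The reorganized chat_tab() routes the Parameters accordion's temperature and top_p sliders into gr.ChatInterface through additional_inputs, and Gradio then passes their current values to run_chat as extra positional arguments after (message, history). A minimal standalone sketch of that mechanism is shown below; the function name, slider labels, and default values are illustrative and not taken from app.py.

import gradio as gr


def echo_with_params(message, history, temperature, top_p):
    # Gradio appends the current values of additional_inputs after (message, history),
    # the same way run_chat receives instructions, user_name, bot_name, temperature,
    # top_p and session_id in the diff above.
    return f"[temperature={temperature}, top_p={top_p}] {message}"


demo = gr.ChatInterface(
    fn=echo_with_params,
    additional_inputs=[
        # Illustrative sliders; app.py builds its own inside chat_accordion().
        gr.Slider(minimum=0.1, maximum=1.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.05, label="Top-p"),
    ],
)

if __name__ == "__main__":
    demo.launch()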