Update app.py
app.py
CHANGED
@@ -1,13 +1,12 @@
 import gradio as gr
 import json, openai, os, time
+
 from openai import OpenAI
 
-_client = None
-_assistant = None
-_thread = None
+_client = _assistant = _thread = None
 
 def show_json(str, obj):
-    print(f"
+    print(f"=> {str}\n{json.loads(obj.model_dump_json())}")
 
 def init_assistant():
     global _client, _assistant, _thread
@@ -17,7 +16,7 @@ def init_assistant():
     _assistant = _client.beta.assistants.create(
         name="Math Tutor",
         instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
-        model="gpt-
+        model="gpt-4-1106-preview",
     )
 
     _thread = _client.beta.threads.create()
@@ -44,34 +43,21 @@ def extract_content_values(data):
 
     return content_values
 
-def chat(message, history):
+def chat(message):
     global _client, _assistant, _thread
 
-    #history_openai_format = []
-
-    #for human, assistant in history:
-    #    history_openai_format.append({"role": "user", "content": human})
-    #    history_openai_format.append({"role": "assistant", "content":assistant})
-
-    #history_openai_format.append({"role": "user", "content": message})
-
-    #if len(history_openai_format) == 1:
     if _client == None:
         init_assistant()
 
     #show_json("assistant", _assistant)
     #show_json("thread", _thread)
-
-    #print("### history")
-    #print(len(history_openai_format))
-    #print(history_openai_format)
-
+
     message = _client.beta.threads.messages.create(
         role="user",
         thread_id=_thread.id,
         content=message,
     )
-
+
     #show_json("message", message)
 
     run = _client.beta.threads.runs.create(
@@ -81,7 +67,7 @@ def chat(message, history):
 
     run = wait_on_run(run)
 
-    show_json("run", run)
+    #show_json("run", run)
 
     messages = _client.beta.threads.messages.list(thread_id=_thread.id)
 
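The updated chat() drops the Gradio history argument along with the commented-out history_openai_format bookkeeping: conversation state now lives in the OpenAI thread that init_assistant() creates once and keeps in the module-level globals. chat() also calls a wait_on_run(run) helper that sits outside the changed hunks, so its body is not visible in this diff. A typical polling implementation, consistent with the time import and the Assistants beta API, might look like the sketch below; the helper body and the 0.5-second interval are assumptions, not part of this commit.

def wait_on_run(run):
    # Poll until the run leaves the queued/in_progress states.
    # Relies on the module-level _client and _thread set up by
    # init_assistant(); the 0.5 s sleep is an assumed polling interval.
    while run.status in ("queued", "in_progress"):
        run = _client.beta.threads.runs.retrieve(
            thread_id=_thread.id,
            run_id=run.id,
        )
        time.sleep(0.5)
    return run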
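extract_content_values(data) is referenced around these hunks but is likewise untouched by the commit; only its return content_values line appears in the context. Assuming it walks the message list returned by _client.beta.threads.messages.list(...) and collects the plain-text parts, a hypothetical reconstruction could be:

def extract_content_values(data):
    # Gather the text value of every text content block in a
    # beta.threads.messages.list(...) result; sketch only, the real
    # implementation in app.py may differ.
    content_values = []
    for message in data.data:
        for content in message.content:
            if content.type == "text":
                content_values.append(content.text.value)
    return content_values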