bstraehle committed on
Commit
12c2b66
·
verified ·
1 Parent(s): 7ec6e7a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -33
app.py CHANGED
@@ -2,42 +2,36 @@ import gradio as gr
2
  import json, openai, os, time
3
  from openai import OpenAI
4
 
5
- client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
6
- assistant2 = client.beta.assistants.create(
7
- name="Math Tutor",
8
- instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
9
- model="gpt-4-1106-preview",
10
- )
11
- thread = None
12
 
13
  def show_json(str, obj):
14
- print(f"### {str}")
15
- #print(json.loads(obj.model_dump_json()))
16
- print(obj)
17
 
18
  def init_assistant():
19
- global client, assistant2, thread
20
 
21
- #client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
22
 
23
- #assistant2 = client.beta.assistants.create(
24
- # name="Math Tutor",
25
- # instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
26
- # model="gpt-4-1106-preview",
27
- #)
28
 
29
- thread = client.beta.threads.create()
30
 
31
  def wait_on_run(client, run, thread):
32
  while run.status == "queued" or run.status == "in_progress":
33
  run = client.beta.threads.runs.retrieve(
34
- thread_id=thread.id,
35
  run_id=run.id,
 
36
  )
37
  time.sleep(0.25)
38
  return run
39
 
40
- def extract_content_value(data):
41
  content_values = []
42
  for item in data.data:
43
  for content in item.content:
@@ -46,9 +40,9 @@ def extract_content_value(data):
46
  return content_values
47
 
48
  def chat(message, history):
49
- global client
50
- global assistant2
51
- global thread
52
 
53
  history_openai_format = []
54
 
@@ -61,37 +55,35 @@ def chat(message, history):
61
  if len(history_openai_format) == 1:
62
  init_assistant()
63
 
64
- show_json("assistant", assistant2)
65
- show_json("thread", thread)
66
 
67
  #print("### history")
68
  #print(len(history_openai_format))
69
  #print(history_openai_format)
70
 
71
  message = client.beta.threads.messages.create(
72
- thread_id=thread.id,
73
  role="user",
74
- content=message,
 
75
  )
76
 
77
  #show_json("message", message)
78
 
79
  run = client.beta.threads.runs.create(
80
- thread_id=thread.id,
81
- assistant_id=assistant2.id,
82
  )
83
 
84
- #show_json("run", run)
85
-
86
  run = wait_on_run(client, run, thread)
87
 
88
  #show_json("run", run)
89
 
90
- messages = client.beta.threads.messages.list(thread_id=thread.id)
91
 
92
  #show_json("messages", messages)
93
 
94
- return extract_content_value(messages)[0]
95
 
96
  gr.ChatInterface(
97
  chat,
 
2
  import json, openai, os, time
3
  from openai import OpenAI
4
 
5
+ _client = None
6
+ _assistant = None
7
+ _thread = None
 
 
 
 
8
 
9
def show_json(label, obj):
    """Print a labeled JSON dump of an OpenAI SDK object for debugging.

    label: short tag naming the object (e.g. "assistant", "thread").
    obj: any SDK model exposing model_dump_json().
    Returns None; output goes to stdout only.
    """
    # Renamed the first parameter from `str`, which shadowed the builtin
    # (both call sites pass it positionally, so callers are unaffected).
    print(f"===> {label}\n{json.loads(obj.model_dump_json())}")
 
 
11
 
12
  def init_assistant():
13
+ global _client, _assistant, _thread
14
 
15
+ _client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
16
 
17
+ _assistant = client.beta.assistants.create(
18
+ name="Math Tutor",
19
+ instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
20
+ model="gpt-4-1106-preview",
21
+ )
22
 
23
+ _thread = client.beta.threads.create()
24
 
25
  def wait_on_run(client, run, thread):
26
  while run.status == "queued" or run.status == "in_progress":
27
  run = client.beta.threads.runs.retrieve(
 
28
  run_id=run.id,
29
+ thread_id=thread.id,
30
  )
31
  time.sleep(0.25)
32
  return run
33
 
34
+ def extract_content_values(data):
35
  content_values = []
36
  for item in data.data:
37
  for content in item.content:
 
40
  return content_values
41
 
42
  def chat(message, history):
43
+ global _client
44
+ global _assistant
45
+ global _thread
46
 
47
  history_openai_format = []
48
 
 
55
  if len(history_openai_format) == 1:
56
  init_assistant()
57
 
58
+ show_json("assistant", _assistant)
59
+ show_json("thread", _thread)
60
 
61
  #print("### history")
62
  #print(len(history_openai_format))
63
  #print(history_openai_format)
64
 
65
  message = client.beta.threads.messages.create(
 
66
  role="user",
67
+ thread_id=_thread.id,
68
+ content=history_openai_format,
69
  )
70
 
71
  #show_json("message", message)
72
 
73
  run = client.beta.threads.runs.create(
74
+ assistant_id=_assistant.id,
75
+ thread_id=_thread.id,
76
  )
77
 
 
 
78
  run = wait_on_run(client, run, thread)
79
 
80
  #show_json("run", run)
81
 
82
+ messages = client.beta.threads.messages.list(thread_id=_thread.id)
83
 
84
  #show_json("messages", messages)
85
 
86
+ return extract_content_values(messages)[0]
87
 
88
  gr.ChatInterface(
89
  chat,