mirxakamran893 committed on
Commit 3868931 · verified · 1 Parent(s): b3766ed

Update app.py

Files changed (1)
  1. app.py +17 -27
app.py CHANGED
@@ -5,32 +5,34 @@ import faiss
 import numpy as np
 import json
 from sentence_transformers import SentenceTransformer
-from datetime import datetime
 
-# ✅ Load RAG context
+# ✅ Load context
 with open("texts.json", "r", encoding="utf-8") as f:
     texts = json.load(f)
 
 index = faiss.read_index("faiss_index.bin")
 embed_model = SentenceTransformer("all-MiniLM-L6-v2")
 
-# ✅ API setup
+# ✅ API Setup
 API_KEY = os.environ.get("OPENROUTER_API_KEY")
 MODEL = "meta-llama/llama-3.3-8b-instruct:free"
 
-# ✅ Get relevant context
-def get_context(query, top_k=5, threshold=0.45):
+# ✅ Context search
+def get_context(query, top_k=5, min_score=0.2):
     query_vec = embed_model.encode([query])
     D, I = index.search(np.array(query_vec), top_k)
-    if all(score < threshold for score in D[0]):
+
+    # If top score is too low, reject
+    if D[0][0] < min_score:
         return None
+
     return "\n".join([texts[i] for i in I[0]])
 
-# ✅ Chat logic
+# ✅ Chat function
 def chat_fn(message, history):
     context = get_context(message)
     if context is None:
-        return history[:-1] + [(message, "❌ Sorry! I cannot answer that.")], gr.update(visible=True)
+        return history[:-1] + [(message, "❌ Sorry! I cannot answer that.")]
 
     headers = {
         "Authorization": f"Bearer {API_KEY}",
@@ -40,7 +42,7 @@ def chat_fn(message, history):
     messages = [
         {
            "role": "system",
-           "content": f"You are a helpful assistant. Only answer using this context:\n{context}\nIf the answer is not in the context, reply with 'Sorry! I cannot answer that.'"
+           "content": f"You are a helpful assistant. Use ONLY this context to answer:\n{context}\nIf not found in context, reply: 'Sorry! I cannot answer that.'"
         }
     ]
 
@@ -62,16 +64,7 @@ def chat_fn(message, history):
     except Exception as e:
         reply = f"❌ Error: {e}"
 
-    return history[:-1] + [(message, reply)], gr.update(visible=True)
-
-# ✅ Export logs to file
-def export_logs(history):
-    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
-    filename = f"chat_log_{timestamp}.txt"
-    log_text = "\n\n".join([f"You: {q}\nBot: {a}" for q, a in history])
-    with open(filename, "w", encoding="utf-8") as f:
-        f.write(log_text)
-    return filename
+    return history[:-1] + [(message, reply)]
 
 # ✅ UI
 with gr.Blocks(css="""
@@ -82,20 +75,17 @@ with gr.Blocks(css="""
     state = gr.State([])
 
     with gr.Row():
-        msg = gr.Textbox(placeholder="Type your message and press enter...", scale=8)
-        export_btn = gr.Button("Export Chat", scale=1, visible=False)
+        msg = gr.Textbox(placeholder="Type your message and press enter...", scale=9)
 
-    # Typing simulation
+    # Typing sim
     def user_send(message, history):
-        return "", history + [(message, "⏳ ...")], gr.update(visible=False)
+        return "", history + [(message, "⏳ ...")]
 
     def complete_chat(message, history):
        return chat_fn(message, history)
 
-    msg.submit(user_send, [msg, state], [msg, chatbot, export_btn]).then(
-        complete_chat, [msg, state], [chatbot, export_btn]
+    msg.submit(user_send, [msg, state], [msg, chatbot]).then(
+        complete_chat, [msg, state], [chatbot]
     )
 
-    export_btn.click(fn=export_logs, inputs=[state], outputs=gr.File(label="Download Chat Log"))
-
 demo.launch()
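
For reference, a minimal sketch (not part of the commit) of how the new min_score rejection in get_context() behaves, assuming an inner-product index over normalized embeddings where a higher score means a closer match; the in-memory index, sample texts, and queries below are illustrative stand-ins for the app's faiss_index.bin and texts.json:

# Illustrative sketch only: rebuilds the rejection logic from the updated
# get_context() against a tiny in-memory index. Assumes an inner-product
# (cosine-style) index over normalized embeddings, so D[0][0] is the score
# of the best hit and larger values mean more similar.
import faiss
import numpy as np
from sentence_transformers import SentenceTransformer

texts = [
    "Our support desk is open 9am to 5pm on weekdays.",   # stand-in for texts.json
    "Standard shipping takes 3 to 5 business days.",
]
embed_model = SentenceTransformer("all-MiniLM-L6-v2")

vecs = embed_model.encode(texts, normalize_embeddings=True)
index = faiss.IndexFlatIP(vecs.shape[1])                  # stand-in for faiss_index.bin
index.add(np.asarray(vecs, dtype="float32"))

def get_context(query, top_k=5, min_score=0.2):
    query_vec = embed_model.encode([query], normalize_embeddings=True)
    D, I = index.search(np.asarray(query_vec, dtype="float32"), min(top_k, index.ntotal))
    if D[0][0] < min_score:          # best score still below threshold -> no usable context
        return None
    return "\n".join(texts[i] for i in I[0])

print(get_context("When will my order arrive?"))          # expected: shipping text ranks first
print(get_context("What is the capital of France?"))      # likely None: off-topic, low score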