mirxakamran893 committed
Commit 252634f · verified · 1 Parent(s): de1ee1b

Update app.py

Files changed (1)
app.py +10 -8
app.py CHANGED
@@ -6,16 +6,16 @@ import numpy as np
 import json
 from sentence_transformers import SentenceTransformer
 
-# ✅ Load RAG-related files
+# ✅ Load data
 with open("texts.json", "r", encoding="utf-8") as f:
     texts = json.load(f)
 
 index = faiss.read_index("faiss_index.bin")
 embed_model = SentenceTransformer("all-MiniLM-L6-v2")
 
-# ✅ API setup
-API_KEY = os.environ.get("OPENROUTER_API_KEY")
-MODEL = "mistralai/mistral-7b-instruct:free"
+# ✅ Together AI Setup
+API_KEY = os.environ.get("TOGETHER_API_KEY") or "76615a7c686e20c0ee8cae288fddc7ab35ae51e768abc45b3defb5b1850d3dd0"
+MODEL = "deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free"
 
 # ✅ Context retriever
 def get_context(query, top_k=5):
@@ -24,7 +24,7 @@ def get_context(query, top_k=5):
     context_chunks = [texts[i] for i in I[0] if i < len(texts)]
     return "\n".join(context_chunks).strip()
 
-# ✅ Chat function with strict context use
+# ✅ Chat function using Together API
 def chat_fn(message, history):
     context = get_context(message)
 
@@ -51,11 +51,13 @@ def chat_fn(message, history):
 
     payload = {
         "model": MODEL,
-        "messages": messages
+        "messages": messages,
+        "temperature": 0.7,
+        "max_tokens": 512
     }
 
     try:
-        response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload)
+        response = requests.post("https://api.together.xyz/v1/chat/completions", headers=headers, json=payload)
         response.raise_for_status()
         reply = response.json()["choices"][0]["message"]["content"]
     except Exception as e:
@@ -63,7 +65,7 @@ def chat_fn(message, history):
 
     return reply
 
-# ✅ Gradio interface (clean look, no header/footer)
+# ✅ Gradio chat interface
 chat_ui = gr.ChatInterface(
     fn=chat_fn,
     title="",