mirxakamran893 committed on
Commit
270b445
·
verified Β·
1 Parent(s): 0dca0ed

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +107 -47
app.py CHANGED
@@ -1,60 +1,47 @@
1
  import gradio as gr
2
  import requests
3
- import json
4
  import os
5
  import faiss
6
  import numpy as np
 
7
  from sentence_transformers import SentenceTransformer
8
 
9
- # Load files
10
- index = faiss.read_index("faiss_index.bin")
11
- with open("texts.json", "r") as f:
12
  texts = json.load(f)
13
 
14
- # Embedding model
15
- model = SentenceTransformer("all-MiniLM-L6-v2")
16
 
17
- # API key
18
  API_KEY = os.environ.get("OPENROUTER_API_KEY")
19
  MODEL = "deepseek/deepseek-chat-v3-0324:free"
20
 
21
- # Function: Get relevant chunks
22
- def get_relevant_context(query, k=5):
23
- query_vector = model.encode([query])
24
- scores, indices = index.search(np.array(query_vector), k)
25
- return [texts[i] for i in indices[0] if i < len(texts)]
26
 
27
- # Chatbot logic
28
- def chat_with_data(message, history):
29
- greetings = ["hi", "hello", "hey", "salam", "assalamualaikum"]
30
- message_lower = message.lower().strip()
31
 
32
- if any(greet in message_lower for greet in greetings):
33
- return "πŸ‘‹ Hello! How can I assist you regarding LogiqCurve today?"
34
-
35
- context = get_relevant_context(message)
36
- if not context or all(len(c.strip()) < 10 for c in context):
37
- return "❌ Sorry, I can only answer questions based on content from LogiqCurve.com."
38
-
39
- context_text = "\n".join(context)
40
 
41
- prompt = (
42
- f"You are a helpful assistant for LogiqCurve.\n"
43
- f"ONLY use the context below to answer the user. Do not use any outside knowledge.\n\n"
44
- f"Context:\n{context_text}\n\n"
45
- f"User question: {message}\n\n"
46
- f"Answer strictly using the context above."
47
- )
48
 
49
  messages = [
50
- {"role": "system", "content": "You are a strict assistant who only answers using provided context."},
51
- {"role": "user", "content": prompt}
52
  ]
53
 
54
- headers = {
55
- "Authorization": f"Bearer {API_KEY}",
56
- "Content-Type": "application/json"
57
- }
 
58
 
59
  payload = {
60
  "model": MODEL,
@@ -62,18 +49,91 @@ def chat_with_data(message, history):
62
  }
63
 
64
  try:
65
- res = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload)
66
- res.raise_for_status()
67
- reply = res.json()["choices"][0]["message"]["content"]
68
  except Exception as e:
69
  reply = f"❌ Error: {e}"
70
 
71
  return reply
72
 
73
- # UI
74
- gr.ChatInterface(
75
- fn=chat_with_data,
76
- title="MK Private Assistant",
77
- description="Ask me about LogiqCurve services. I respond only using website data.",
78
- theme="soft"
79
- ).launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import gradio as gr
2
  import requests
 
3
  import os
4
  import faiss
5
  import numpy as np
6
+ import json
7
  from sentence_transformers import SentenceTransformer
8
 
9
+ # βœ… Load RAG-related files
10
+ with open("texts.json", "r", encoding="utf-8") as f:
 
11
  texts = json.load(f)
12
 
13
+ index = faiss.read_index("faiss_index.bin")
14
+ embed_model = SentenceTransformer("all-MiniLM-L6-v2")
15
 
16
+ # βœ… Use your OpenRouter API key from environment
17
  API_KEY = os.environ.get("OPENROUTER_API_KEY")
18
  MODEL = "deepseek/deepseek-chat-v3-0324:free"
19
 
20
+ # βœ… Function to search relevant context
21
+ def get_context(query, top_k=5):
22
+ query_vec = embed_model.encode([query])
23
+ D, I = index.search(np.array(query_vec), top_k)
24
+ return "\n".join([texts[i] for i in I[0]])
25
 
26
+ # βœ… Function to handle chat
 
 
 
27
 
28
+ def chat_fn(message, history):
29
+ headers = {
30
+ "Authorization": f"Bearer {API_KEY}",
31
+ "Content-Type": "application/json"
32
+ }
 
 
 
33
 
34
+ context = get_context(message)
 
 
 
 
 
 
35
 
36
  messages = [
37
+ {"role": "system", "content": "You are a helpful assistant. Use the following context to answer: " + context}
 
38
  ]
39
 
40
+ for user, assistant in history:
41
+ messages.append({"role": "user", "content": user})
42
+ messages.append({"role": "assistant", "content": assistant})
43
+
44
+ messages.append({"role": "user", "content": message})
45
 
46
  payload = {
47
  "model": MODEL,
 
49
  }
50
 
51
  try:
52
+ response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload)
53
+ response.raise_for_status()
54
+ reply = response.json()["choices"][0]["message"]["content"]
55
  except Exception as e:
56
  reply = f"❌ Error: {e}"
57
 
58
  return reply
59
 
60
+ def upload_fn(file):
61
+ if file is None:
62
+ return "No file uploaded."
63
+ try:
64
+ # Read uploaded file
65
+ with open(file.name, 'r', encoding='utf-8') as f:
66
+ new_texts = [line.strip() for line in f if line.strip()]
67
+ if not new_texts:
68
+ return "Uploaded file is empty."
69
+
70
+ # Load current texts
71
+ with open("texts.json", "r", encoding="utf-8") as f:
72
+ texts = json.load(f)
73
+
74
+ # Add new texts
75
+ texts.extend(new_texts)
76
+ with open("texts.json", "w", encoding="utf-8") as f:
77
+ json.dump(texts, f, ensure_ascii=False, indent=2)
78
+
79
+ # Embed new texts and add to FAISS index
80
+ new_vecs = embed_model.encode(new_texts)
81
+ index.add(np.array(new_vecs))
82
+ faiss.write_index(index, "faiss_index.bin")
83
+
84
+ return f"Successfully added {len(new_texts)} new texts."
85
+ except Exception as e:
86
+ return f"❌ Error during upload: {e}"
87
+
88
+ def save_chat_history(user, assistant):
89
+ try:
90
+ history_file = "chat_history.json"
91
+ if os.path.exists(history_file):
92
+ with open(history_file, "r", encoding="utf-8") as f:
93
+ chat_history = json.load(f)
94
+ else:
95
+ chat_history = []
96
+ chat_history.append({"user": user, "assistant": assistant})
97
+ with open(history_file, "w", encoding="utf-8") as f:
98
+ json.dump(chat_history, f, ensure_ascii=False, indent=2)
99
+ except Exception as e:
100
+ pass # Optionally log error
101
+
102
+ # Modified chat function to save history
103
+
104
+ def chat_and_save_fn(message, history):
105
+ reply = chat_fn(message, history)
106
+ save_chat_history(message, reply)
107
+ return reply
108
+
109
+ # Modified upload function to show status in chat
110
+
111
+ def upload_and_respond_fn(file, history):
112
+ status = upload_fn(file)
113
+ # Show upload status as a chat message from assistant
114
+ if status.startswith("Successfully"):
115
+ save_chat_history("[File Uploaded]", status)
116
+ return history + [["[File Uploaded]", status]]
117
+
118
+ with gr.Blocks(theme="soft") as app:
119
+ gr.Markdown("# CODEX MIRXA KAMRAN\nChat with AI MODEL trained By Mirxa Kamran")
120
+ chatbot = gr.Chatbot()
121
+ with gr.Row():
122
+ msg = gr.Textbox(placeholder="Type your message here...")
123
+ upload = gr.File(label="Upload .txt", file_types=[".txt"])
124
+ send_btn = gr.Button("Send")
125
+
126
+ def user_send(user_message, chat_history, file):
127
+ if file is not None:
128
+ # Handle file upload and show status in chat
129
+ return upload_and_respond_fn(file, chat_history), "", None
130
+ if user_message.strip() == "":
131
+ return chat_history, "", None
132
+ reply = chat_and_save_fn(user_message, chat_history)
133
+ chat_history = chat_history + [[user_message, reply]]
134
+ return chat_history, "", None
135
+
136
+ send_btn.click(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
137
+ msg.submit(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
138
+
139
+ app.launch()