Update app.py
app.py
CHANGED
@@ -15,15 +15,15 @@ embed_model = SentenceTransformer("all-MiniLM-L6-v2")

 # ✅ Use your OpenRouter API key from environment
 API_KEY = os.environ.get("OPENROUTER_API_KEY")
-MODEL = "deepseek/deepseek-
+MODEL = "deepseek/deepseek-r1-0528:free"

-# ✅ Function to
+# ✅ Function to get relevant context
 def get_context(query, top_k=5):
     query_vec = embed_model.encode([query])
     D, I = index.search(np.array(query_vec), top_k)
     return "\n".join([texts[i] for i in I[0]])

-# ✅
+# ✅ Chat function
 def chat_fn(message, history):
     headers = {
         "Authorization": f"Bearer {API_KEY}",
@@ -56,78 +56,8 @@ def chat_fn(message, history):

     return reply

-
-
-
-def upload_fn(file):
-    try:
-        with open(file.name, 'r', encoding='utf-8') as f:
-            new_texts = [line.strip() for line in f if line.strip()]
-        if not new_texts:
-            return "Uploaded file is empty."
-
-        # Load current texts
-        with open("texts.json", "r", encoding="utf-8") as f:
-            texts = json.load(f)
-
-        # Add new texts
-        texts.extend(new_texts)
-        with open("texts.json", "w", encoding="utf-8") as f:
-            json.dump(texts, f, ensure_ascii=False, indent=2)
-
-        # Embed new texts and add to FAISS index
-        new_vecs = embed_model.encode(new_texts)
-        index.add(np.array(new_vecs))
-        faiss.write_index(index, "faiss_index.bin")
-
-        return f"Successfully added {len(new_texts)} new texts."
-    except Exception as e:
-        return f"❌ Error during upload: {e}"
-
-def save_chat_history(user, assistant):
-    try:
-        history_file = "chat_history.json"
-        if os.path.exists(history_file):
-            with open(history_file, "r", encoding="utf-8") as f:
-                chat_history = json.load(f)
-        else:
-            chat_history = []
-        chat_history.append({"user": user, "assistant": assistant})
-        with open(history_file, "w", encoding="utf-8") as f:
-            json.dump(chat_history, f, ensure_ascii=False, indent=2)
-    except Exception as e:
-        pass  # Optionally log error
-
-# ✅ Modified chat function for Gradio API compatibility
-def chat_and_save_fn(message, history):
-    reply = chat_fn(message, history)
-    save_chat_history(message, reply)
-    return [reply]  # ✅ Fix: return a list for API response
-
-# ✅ Optional: file upload handler if used in UI (not exposed via API)
-def upload_and_respond_fn(file, history):
-    status = upload_fn(file)
-    return [status]  # ✅ Fix: ensure return is list for compatibility
-
-# ✅ Gradio UI (for local test / HF Space frontend)
-with gr.Blocks(theme="soft") as app:
-    gr.Markdown("# CODEX MIRXA KAMRAN\nChat with AI MODEL trained By Mirxa Kamran")
-    chatbot = gr.Chatbot()
-    with gr.Row():
-        msg = gr.Textbox(placeholder="Type your message here...")
-        upload = gr.File(label="Upload .txt", file_types=[".txt"])
-    send_btn = gr.Button("Send")
-
-    def user_send(user_message, chat_history, file):
-        if file is not None:
-            return upload_and_respond_fn(file, chat_history), "", None
-        if user_message.strip() == "":
-            return chat_history, "", None
-        reply = chat_and_save_fn(user_message, chat_history)[0]  # unpack from list
-        chat_history = chat_history + [[user_message, reply]]
-        return chat_history, "", None
-
-    send_btn.click(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
-    msg.submit(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
-
-app.launch()
+# ✅ Launch Gradio chat without titles or descriptions (clean look)
+gr.ChatInterface(
+    fn=chat_fn,
+    theme="soft"
+).launch(inbrowser=True, share=True)
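Note: the second hunk skips lines 30-55, so the body of chat_fn (the actual OpenRouter request) is not part of this diff. For reference only, a minimal sketch of a chat_fn that fits the surrounding context lines and the new gr.ChatInterface wiring is shown below; the use of the requests library, the prompt layout, and folding get_context into the system message are assumptions, not taken from this commit. gr.ChatInterface passes the latest user message and the running history to fn and renders the returned string, which is why the commit can drop the Blocks UI and the list-wrapping helpers.

# Hypothetical sketch of the elided chat_fn body (not shown in this diff).
# Assumes the `requests` library and OpenRouter's OpenAI-compatible chat completions endpoint.
import requests

def chat_fn(message, history):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    # Pull the top-matching stored texts from the FAISS index as grounding context
    context = get_context(message)
    payload = {
        "model": MODEL,
        "messages": [
            {"role": "system", "content": f"Answer using this context:\n{context}"},
            {"role": "user", "content": message},
        ],
    }
    resp = requests.post(
        "https://openrouter.ai/api/v1/chat/completions",
        headers=headers,
        json=payload,
        timeout=60,
    )
    resp.raise_for_status()
    reply = resp.json()["choices"][0]["message"]["content"]
    return reply  # gr.ChatInterface expects a plain string reply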