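"""Gradio RAG chatbot: answers questions using context retrieved from a local
FAISS index, sends the conversation to an OpenRouter-hosted chat model, and
lets users extend the knowledge base by uploading .txt files."""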
import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer
# ✅ Load RAG-related files
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)

index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")
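
# NOTE: faiss_index.bin and texts.json must already exist; they are not built by
# this app. A minimal offline build sketch (index type assumed to be IndexFlatL2,
# not confirmed by the original code) would be:
#
#     texts = ["passage one", "passage two"]       # one passage per entry
#     vecs = embed_model.encode(texts)              # (n, 384) float32 matrix
#     index = faiss.IndexFlatL2(vecs.shape[1])      # exact L2 search
#     index.add(np.array(vecs))
#     faiss.write_index(index, "faiss_index.bin")
#     with open("texts.json", "w", encoding="utf-8") as f:
#         json.dump(texts, f, ensure_ascii=False, indent=2)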

# ✅ Use your OpenRouter API key from environment
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "deepseek/deepseek-chat-v3-0324:free"
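# NOTE: if OPENROUTER_API_KEY is unset, the Authorization header is invalid and
# OpenRouter rejects the request; chat_fn catches that and shows the error in chat.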

# ✅ Function to search relevant context
def get_context(query, top_k=5):
    query_vec = embed_model.encode([query])
    D, I = index.search(np.array(query_vec), top_k)
    # FAISS pads results with -1 when the index holds fewer than top_k vectors
    return "\n".join(texts[i] for i in I[0] if i != -1)
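# Example: get_context("What services do you offer?") returns the five stored
# passages closest to the query, joined with newlines, ready to be placed in
# the system prompt below.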

# ✅ Function to handle chat
def chat_fn(message, history):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    context = get_context(message)
    messages = [
        {"role": "system", "content": "You are a helpful assistant. Use the following context to answer: " + context}
    ]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    payload = {
        "model": MODEL,
        "messages": messages
    }

    try:
        response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload)
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"❌ Error: {e}"

    return reply

def upload_fn(file):
    global texts
    if file is None:
        return "No file uploaded."
    try:
        # Read uploaded file (one passage per non-empty line)
        with open(file.name, 'r', encoding='utf-8') as f:
            new_texts = [line.strip() for line in f if line.strip()]

        if not new_texts:
            return "Uploaded file is empty."

        # Add new texts to the in-memory list (so get_context sees them
        # immediately) and persist the updated list to texts.json
        texts.extend(new_texts)
        with open("texts.json", "w", encoding="utf-8") as f:
            json.dump(texts, f, ensure_ascii=False, indent=2)

        # Embed new texts and add to FAISS index
        new_vecs = embed_model.encode(new_texts)
        index.add(np.array(new_vecs))
        faiss.write_index(index, "faiss_index.bin")

        return f"Successfully added {len(new_texts)} new texts."
    except Exception as e:
        return f"❌ Error during upload: {e}"
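
# ✅ Append each chat exchange to chat_history.json on disk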
def save_chat_history(user, assistant):
    try:
        history_file = "chat_history.json"
        if os.path.exists(history_file):
            with open(history_file, "r", encoding="utf-8") as f:
                chat_history = json.load(f)
        else:
            chat_history = []
        chat_history.append({"user": user, "assistant": assistant})
        with open(history_file, "w", encoding="utf-8") as f:
            json.dump(chat_history, f, ensure_ascii=False, indent=2)
    except Exception:
        pass  # Saving history is best-effort; optionally log the error

# Modified chat function that also saves the exchange to history
def chat_and_save_fn(message, history):
    reply = chat_fn(message, history)
    save_chat_history(message, reply)
    return reply

# Modified upload function that shows the upload status in the chat
def upload_and_respond_fn(file, history):
    status = upload_fn(file)
    # Record successful uploads in the persistent history as well
    if status.startswith("Successfully"):
        save_chat_history("[File Uploaded]", status)
    # Show the upload status as a chat message from the assistant
    return history + [["[File Uploaded]", status]]

# ✅ Build the Gradio UI
with gr.Blocks(theme="soft") as app:
    gr.Markdown("# CODEX MIRXA KAMRAN\nChat with an AI model trained by Mirxa Kamran")
    chatbot = gr.Chatbot()

    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message here...")
        upload = gr.File(label="Upload .txt", file_types=[".txt"])

    send_btn = gr.Button("Send")

    def user_send(user_message, chat_history, file):
        # If a file is attached, handle the upload and show its status in the chat
        if file is not None:
            return upload_and_respond_fn(file, chat_history), "", None
        if user_message.strip() == "":
            return chat_history, "", None
        reply = chat_and_save_fn(user_message, chat_history)
        chat_history = chat_history + [[user_message, reply]]
        return chat_history, "", None

    send_btn.click(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
    msg.submit(user_send, [msg, chatbot, upload], [chatbot, msg, upload])

app.launch()