# RAG chatbot: Gradio UI + FAISS retrieval + OpenRouter chat completions.
import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer
# Load the RAG corpus: the stored texts and their FAISS vector index,
# plus the sentence-transformer used to embed queries and new texts.
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)
index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")

# OpenRouter configuration: API key comes from the environment so it is
# never committed; MODEL is the chat-completion model slug.
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "deepseek/deepseek-chat-v3-0324:free"
def get_context(query, top_k=5):
    """Return the top_k stored texts most similar to *query*, newline-joined.

    Encodes the query with the module-level sentence-transformer and searches
    the module-level FAISS index against the module-level ``texts`` list.
    """
    query_vec = embed_model.encode([query])
    _, ids = index.search(np.array(query_vec), top_k)
    # FAISS pads the result with -1 ids when the index holds fewer than
    # top_k vectors; guard the upper bound too in case texts.json and the
    # index ever get out of sync.
    return "\n".join(texts[i] for i in ids[0] if 0 <= i < len(texts))
def chat_fn(message, history):
    """Answer *message* via OpenRouter, grounding on retrieved context.

    history is a list of (user, assistant) pairs from the Gradio chatbot.
    Returns the assistant's reply string, or an error string on failure
    (the UI shows it inline rather than crashing).
    """
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    context = get_context(message)
    # System turn first, then the prior exchanges, then the new message.
    messages = [
        {"role": "system", "content": "You are a helpful assistant. Use the following context to answer: " + context}
    ]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    payload = {
        "model": MODEL,
        "messages": messages,
    }
    try:
        # timeout= keeps a stalled request from hanging the UI forever
        # (the original call had none, which blocks indefinitely).
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"β Error: {e}"
    return reply
def upload_fn(file):
    """Add each non-empty line of an uploaded .txt file to the RAG corpus.

    New lines are appended to the in-memory ``texts`` list, persisted to
    texts.json, embedded, added to the FAISS index, and the index is saved.
    Returns a human-readable status string (never raises).
    """
    # Bug fix: the original re-read texts.json into a *local* `texts`, so
    # the module-level list used by get_context stayed stale while the
    # FAISS index grew — mis-aligning ids and texts. Mutate the global.
    global texts
    if file is None:
        return "No file uploaded."
    try:
        with open(file.name, "r", encoding="utf-8") as f:
            new_texts = [line.strip() for line in f if line.strip()]
        if not new_texts:
            return "Uploaded file is empty."
        texts.extend(new_texts)
        with open("texts.json", "w", encoding="utf-8") as f:
            json.dump(texts, f, ensure_ascii=False, indent=2)
        # Embed the new texts and keep the on-disk index in sync.
        new_vecs = embed_model.encode(new_texts)
        index.add(np.array(new_vecs))
        faiss.write_index(index, "faiss_index.bin")
        return f"Successfully added {len(new_texts)} new texts."
    except Exception as e:
        return f"β Error during upload: {e}"
def save_chat_history(user, assistant):
    """Append one {user, assistant} exchange to chat_history.json (best effort)."""
    history_file = "chat_history.json"
    try:
        if os.path.exists(history_file):
            with open(history_file, "r", encoding="utf-8") as f:
                entries = json.load(f)
        else:
            entries = []
        entries.append({"user": user, "assistant": assistant})
        with open(history_file, "w", encoding="utf-8") as f:
            json.dump(entries, f, ensure_ascii=False, indent=2)
    except Exception:
        # Persistence is deliberately best-effort: a disk/JSON failure
        # must never break the chat itself.
        pass
def chat_and_save_fn(message, history):
    """Like chat_fn, but also persists the exchange to chat_history.json."""
    answer = chat_fn(message, history)
    save_chat_history(message, answer)
    return answer
def upload_and_respond_fn(file, history):
    """Run the upload and surface its status as an assistant chat turn."""
    status = upload_fn(file)
    # Only successful uploads are persisted to the saved history;
    # errors still appear in the live chat below.
    if status.startswith("Successfully"):
        save_chat_history("[File Uploaded]", status)
    return history + [["[File Uploaded]", status]]
# --- Gradio UI wiring ------------------------------------------------------
with gr.Blocks(theme="soft") as app:
    gr.Markdown("# CODEX MIRXA KAMRAN\nChat with AI MODEL trained By Mirxa Kamran")
    chatbot = gr.Chatbot()
    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message here...")
        upload = gr.File(label="Upload .txt", file_types=[".txt"])
    send_btn = gr.Button("Send")

    def user_send(user_message, chat_history, file):
        """Handle one send event; a pending file upload takes priority.

        Returns (new chat history, cleared textbox, cleared file input).
        """
        if file is not None:
            return upload_and_respond_fn(file, chat_history), "", None
        if not user_message.strip():
            # Ignore empty messages; just clear the inputs.
            return chat_history, "", None
        reply = chat_and_save_fn(user_message, chat_history)
        return chat_history + [[user_message, reply]], "", None

    # Both the button and pressing Enter in the textbox trigger a send.
    send_btn.click(user_send, [msg, chatbot, upload], [chatbot, msg, upload])
    msg.submit(user_send, [msg, chatbot, upload], [chatbot, msg, upload])

app.launch()