import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer
from datetime import datetime

# ✅ Load RAG context
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)

index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")

# ✅ API setup
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"

# ✅ Get relevant context
def get_context(query, top_k=5, threshold=0.45):
    query_vec = embed_model.encode([query])
    D, I = index.search(np.array(query_vec), top_k)
    # Assumes an inner-product (cosine-style) index, where higher scores mean
    # better matches; with an L2 index this check would have to be inverted.
    if all(score < threshold for score in D[0]):
        return None
    # FAISS pads with -1 when fewer than top_k results exist, so skip those.
    return "\n".join(texts[i] for i in I[0] if i != -1)

# ✅ Chat logic
def chat_fn(message, history):
    context = get_context(message)
    if context is None:
        # Replace the typing placeholder (last history entry) with a refusal.
        return history[:-1] + [(message, "❌ Sorry! I cannot answer that.")], gr.update(visible=True)

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }

    messages = [
        {
            "role": "system",
            "content": f"You are a helpful assistant. Only answer using this context:\n{context}\n"
                       "If the answer is not in the context, reply with 'Sorry! I cannot answer that.'"
        }
    ]
    # Replay earlier turns (everything except the pending placeholder entry).
    for user, assistant in history[:-1]:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    payload = {
        "model": MODEL,
        "messages": messages
    }

    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"❌ Error: {e}"

    # Swap the placeholder for the real reply and reveal the export button.
    return history[:-1] + [(message, reply)], gr.update(visible=True)

# ✅ Export logs to file
def export_logs(history):
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    filename = f"chat_log_{timestamp}.txt"
    log_text = "\n\n".join([f"You: {q}\nBot: {a}" for q, a in history])
    with open(filename, "w", encoding="utf-8") as f:
        f.write(log_text)
    return filename

# ✅ UI
with gr.Blocks(css="""
.footer {display: none !important;}
#chat-window {height: 500px !important; overflow-y: auto;}
""") as demo:
    chatbot = gr.Chatbot(elem_id="chat-window")
    state = gr.State([])

    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message and press enter...", scale=8)
        export_btn = gr.Button("Export Chat", scale=1, visible=False)

    # Typing simulation: echo the user message with a placeholder reply,
    # clear the textbox, and hide the export button while waiting.
    def user_send(message, history):
        history = history + [(message, "⏳ ...")]
        return "", history, history, gr.update(visible=False)

    # The textbox was already cleared by user_send, so recover the pending
    # message from the last history entry rather than from the textbox.
    def complete_chat(history):
        message = history[-1][0]
        new_history, export_update = chat_fn(message, history)
        return new_history, new_history, export_update

    # Update state alongside the chatbot so export_logs sees the full history.
    msg.submit(user_send, [msg, state], [msg, chatbot, state, export_btn]).then(
        complete_chat, [state], [chatbot, state, export_btn]
    )

    export_btn.click(fn=export_logs, inputs=[state], outputs=gr.File(label="Download Chat Log"))

demo.launch()
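
# ------------------------------------------------------------------
# Note: texts.json and faiss_index.bin are assumed to already exist.
# Below is a minimal sketch (not part of the original app) of how they
# could be built so that the threshold check in get_context() makes
# sense: an inner-product IndexFlatIP over normalized all-MiniLM-L6-v2
# embeddings, i.e. cosine similarity. Run it once as a separate script
# (e.g. a hypothetical build_index.py), not inside this app:
#
#   import json
#   import faiss
#   import numpy as np
#   from sentence_transformers import SentenceTransformer
#
#   texts = ["chunk 1 ...", "chunk 2 ..."]           # your source chunks
#   model = SentenceTransformer("all-MiniLM-L6-v2")
#   vecs = model.encode(texts, normalize_embeddings=True)
#   index = faiss.IndexFlatIP(vecs.shape[1])          # inner product == cosine on unit vectors
#   index.add(np.asarray(vecs, dtype="float32"))
#   faiss.write_index(index, "faiss_index.bin")
#   with open("texts.json", "w", encoding="utf-8") as f:
#       json.dump(texts, f, ensure_ascii=False)
# ------------------------------------------------------------------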