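# RAG-backed chatbot: retrieves context from a FAISS index built over texts.json,
# sends it with the conversation to an OpenRouter-hosted model, and serves the chat via Gradio.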
import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer
# Load RAG content
with open("texts.json", "r", encoding="utf-8") as f:
texts = json.load(f)
index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")
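# NOTE: retrieval assumes faiss_index.bin was built with this same embedding model,
# so query vectors share the index's dimensionality and embedding space.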
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"
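# NOTE: the OpenRouter API key is read from the environment (e.g. configured as a Space
# secret) and must be present for chat requests to succeed.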
# Search relevant context
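# get_context embeds the query and returns the top_k nearest passages from the index,
# joined with newlines so they can be dropped straight into the system prompt.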
def get_context(query, top_k=5):
    query_vec = embed_model.encode([query])
    D, I = index.search(np.array(query_vec), top_k)
    return "\n".join([texts[i] for i in I[0]])
# Chat function: build an OpenRouter chat-completions request from the retrieved
# context plus the running conversation history.
def chat_fn(message, history):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    context = get_context(message)
    system_prompt = f"""You are Codex Assistant by LogIQ Curve - a helpful, friendly AI assistant.
Talk like a smart human. Use the context below to answer:
{context}
"""
    # Replay prior turns so the model sees the full conversation.
    messages = [{"role": "system", "content": system_prompt}]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    payload = {
        "model": MODEL,
        "messages": messages
    }
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,  # avoid hanging the UI on a stalled request
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"❌ Error: {e}"
    return reply
# Custom CSS for full screen & styling
custom_css = """
* {
font-family: 'Segoe UI', sans-serif;
}
footer, button[data-testid="settings-button"] {
display: none !important;
}
#chat-container {
display: flex;
flex-direction: column;
height: 100vh;
}
#chat-window {
flex: 1;
overflow-y: auto;
padding: 16px;
}
#input-row {
display: flex;
padding: 10px;
border-top: 1px solid #ddd;
}
textarea {
flex: 1;
resize: none;
padding: 10px;
font-size: 1rem;
}
button {
margin-left: 8px;
}
.message.user {
background-color: #daf0ff;
padding: 10px 15px;
border-radius: 10px;
align-self: flex-end;
margin: 5px 0;
max-width: 80%;
}
.message.ai {
background-color: #f0f0f0;
padding: 10px 15px;
border-radius: 10px;
align-self: flex-start;
margin: 5px 0;
max-width: 80%;
}
"""
with gr.Blocks(css=custom_css) as demo:
    chatbot_state = gr.State([])

    with gr.Column(elem_id="chat-container"):
        chatbox = gr.HTML('<div id="chat-window"></div>', elem_id="chat-window")
        with gr.Row(elem_id="input-row"):
            msg = gr.Textbox(placeholder="Type your message here...", lines=1, scale=8, show_label=False)
            send = gr.Button("Send", scale=1)
    def update_chat(message, state):
        # Rebuild (user_text, assistant_text) pairs from the flat state list for chat_fn.
        history = [(state[i][1], state[i + 1][1]) for i in range(0, len(state) - 1, 2)]
        response = chat_fn(message, history)
        state.append(("user", message))
        state.append(("ai", response))
        # Render every turn as a styled chat bubble.
        html = ""
        for role, content in state:
            bubble_class = "user" if role == "user" else "ai"
            html += f'<div class="message {bubble_class}">{content}</div>'
        # Third output clears the textbox after sending.
        return html, state, ""

    send.click(update_chat, [msg, chatbot_state], [chatbox, chatbot_state, msg])
    msg.submit(update_chat, [msg, chatbot_state], [chatbox, chatbot_state, msg])
demo.launch()