# app.py — RAG-backed chat assistant (update by mirxakamran893, commit 92bc6ca, verified)
import html
import json
import os

import faiss
import gradio as gr
import numpy as np
import requests
from sentence_transformers import SentenceTransformer
# --- RAG knowledge base -------------------------------------------------
# Corpus passages, the prebuilt FAISS vector index over them, and the
# sentence-embedding model used to encode incoming queries.
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)

index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")

# OpenRouter credentials and model selection.
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"
# πŸ” Get relevant context from vector index
def get_context(query, top_k=5):
query_vec = embed_model.encode([query])
D, I = index.search(np.array(query_vec), top_k)
return "\n".join([texts[i] for i in I[0]])
# Generate a reply with RAG context + chat history.
def chat_fn(message, history):
    """Answer *message* using retrieved context plus prior turns, via OpenRouter.

    Parameters
    ----------
    message : str
        The user's latest input.
    history : list[tuple[str, str]]
        Prior (user, assistant) message pairs.

    Returns
    -------
    str
        The model's reply, or an in-band "❌ Error: ..." string on failure
        (deliberate best-effort so the UI never crashes on API errors).
    """
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    context = get_context(message)
    system_prompt = f"""You are Codex Assistant by LogIQ Curve — a helpful and humanlike AI.
Avoid robotic language. Respond using the following information:
{context}
"""
    # Rebuild the OpenAI-style message list: system prompt, then history, then
    # the new user message.
    messages = [{"role": "system", "content": system_prompt}]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})

    payload = {"model": MODEL, "messages": messages}
    try:
        # timeout= prevents the UI from hanging forever if OpenRouter stalls
        # (requests has no default timeout).
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        # Broad on purpose: surface any failure (network, HTTP, bad JSON shape)
        # in-chat rather than crashing the interface.
        reply = f"❌ Error: {e}"
    return reply
# API endpoint: plain input/output, no conversation memory.
def api_respond(message):
    """Stateless single-turn wrapper: delegate to chat_fn with empty history."""
    return chat_fn(message, history=[])
# Simple text-in/text-out Gradio interface exposing api_respond.
api_interface = gr.Interface(
    fn=api_respond,
    inputs=gr.Textbox(lines=1, placeholder="Ask me anything..."),
    outputs="text",
    live=False,
)
# Custom CSS for the chat UI: chat window background/scrolling, user vs. AI
# message bubbles, and the input row + send button styling. Injected into
# the gr.Blocks app below via css=custom_css.
custom_css = """
* {
font-family: 'Segoe UI', sans-serif;
}
#chat-window {
background: linear-gradient(to bottom right, #f9f9f9, #e0e7ff);
padding: 20px;
height: 80vh;
overflow-y: auto;
border-radius: 12px;
box-shadow: inset 0 0 8px rgba(0,0,0,0.05);
}
.message {
padding: 12px 18px;
margin: 10px 0;
border-radius: 18px;
max-width: 75%;
word-wrap: break-word;
box-shadow: 0 4px 14px rgba(0,0,0,0.08);
}
.message.user {
background-color: #4F46E5;
color: white;
align-self: flex-end;
border-bottom-right-radius: 4px;
}
.message.ai {
background-color: #ffffff;
color: #111;
align-self: flex-start;
border-bottom-left-radius: 4px;
}
#input-row {
display: flex;
padding: 12px;
background: white;
border-top: 1px solid #ddd;
}
textarea {
flex: 1;
padding: 10px;
border-radius: 10px;
border: 1px solid #ccc;
font-size: 16px;
resize: none;
}
button {
margin-left: 10px;
border-radius: 10px;
background-color: #4F46E5;
color: white;
font-weight: bold;
padding: 0 20px;
box-shadow: 0 4px 12px rgba(79, 70, 229, 0.3);
}
"""
# Chat UI: styled Blocks app with an HTML transcript, textbox, and send button.
with gr.Blocks(css=custom_css) as chatbot_ui:
    # Conversation state: flat list of ("user" | "ai", text) tuples, in order.
    chatbot_state = gr.State([])

    with gr.Column():
        chatbox = gr.HTML('<div id="chat-window"></div>', elem_id="chat-window")
        with gr.Row(elem_id="input-row"):
            msg = gr.Textbox(
                placeholder="Type your message...",
                lines=1,
                show_label=False,
                scale=8
            )
            send = gr.Button("Send", scale=1)

    def respond(message, state):
        """Handle one chat turn: query the model, append to state, re-render HTML.

        Returns (transcript_html, new_state, "") — the trailing "" clears the
        input textbox.
        """
        # Rebuild (user, ai) pairs from the flat role-tagged state list for
        # chat_fn's history argument.
        state_pairs = [(state[i][1], state[i + 1][1]) for i in range(0, len(state) - 1, 2)]
        response = chat_fn(message, state_pairs)
        state.append(("user", message))
        state.append(("ai", response))
        # Escape message text before interpolating into HTML so angle brackets
        # and quotes in user/model output cannot inject markup or scripts.
        rendered = ""
        for role, content in state:
            rendered += f'<div class="message {role}">{html.escape(content)}</div>'
        return rendered, state, ""

    send.click(respond, [msg, chatbot_state], [chatbox, chatbot_state, msg])
    msg.submit(respond, [msg, chatbot_state], [chatbox, chatbot_state, msg])
# Launch both UI and API.
if __name__ == "__main__":
    # NOTE(review): gr .launch() normally blocks in script mode, so the API
    # interface below presumably only starts after the chat UI server shuts
    # down — confirm whether both servers are meant to run concurrently
    # (e.g. via prevent_thread_lock=True on the first launch).
    chatbot_ui.queue().launch(share=True, inline=False)
    api_interface.launch(inline=False)