import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer
# Load RAG context
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)
index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")
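# Assumption (not shown in this repo): faiss_index.bin and texts.json are expected to have been
# built offline with the *same* embedding model, roughly along these lines:
#     embs = SentenceTransformer("all-MiniLM-L6-v2").encode(texts)
#     index = faiss.IndexFlatL2(embs.shape[1])
#     index.add(np.asarray(embs, dtype="float32"))
#     faiss.write_index(index, "faiss_index.bin")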
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"
# Get relevant context from the vector index
def get_context(query, top_k=5):
    query_vec = embed_model.encode([query])
    # index.search returns distances (D) and row indices (I) of the top_k nearest passages
    D, I = index.search(np.array(query_vec), top_k)
    return "\n".join([texts[i] for i in I[0]])
# Generate reply with context + chat history
def chat_fn(message, history):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    context = get_context(message)
    system_prompt = f"""You are Codex Assistant by LogIQ Curve, a helpful and humanlike AI.
Avoid robotic language. Respond using the following information:
{context}
"""
    messages = [{"role": "system", "content": system_prompt}]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    payload = {"model": MODEL, "messages": messages}
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,  # avoid hanging indefinitely on a stalled request
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"Error: {e}"
    return reply
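# For reference, chat_fn parses the OpenAI-compatible response shape that OpenRouter returns,
# roughly: {"choices": [{"message": {"role": "assistant", "content": "..."}}], ...}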
# API endpoint: plain input/output
def api_respond(message):
    return chat_fn(message, [])
api_interface = gr.Interface(
    fn=api_respond,
    inputs=gr.Textbox(lines=1, placeholder="Ask me anything..."),
    outputs="text",
    live=False
)
# Custom CSS and chat UI
custom_css = """
* {
font-family: 'Segoe UI', sans-serif;
}
#chat-window {
background: linear-gradient(to bottom right, #f9f9f9, #e0e7ff);
padding: 20px;
height: 80vh;
overflow-y: auto;
border-radius: 12px;
box-shadow: inset 0 0 8px rgba(0,0,0,0.05);
}
.message {
padding: 12px 18px;
margin: 10px 0;
border-radius: 18px;
max-width: 75%;
word-wrap: break-word;
box-shadow: 0 4px 14px rgba(0,0,0,0.08);
}
.message.user {
background-color: #4F46E5;
color: white;
align-self: flex-end;
border-bottom-right-radius: 4px;
}
.message.ai {
background-color: #ffffff;
color: #111;
align-self: flex-start;
border-bottom-left-radius: 4px;
}
#input-row {
display: flex;
padding: 12px;
background: white;
border-top: 1px solid #ddd;
}
textarea {
flex: 1;
padding: 10px;
border-radius: 10px;
border: 1px solid #ccc;
font-size: 16px;
resize: none;
}
button {
margin-left: 10px;
border-radius: 10px;
background-color: #4F46E5;
color: white;
font-weight: bold;
padding: 0 20px;
box-shadow: 0 4px 12px rgba(79, 70, 229, 0.3);
}
"""
with gr.Blocks(css=custom_css) as chatbot_ui:
    # State holds the transcript as alternating ("user", text) / ("ai", text) tuples
    chatbot_state = gr.State([])
    with gr.Column():
        chatbox = gr.HTML('<div id="chat-window"></div>', elem_id="chat-window")
        with gr.Row(elem_id="input-row"):
            msg = gr.Textbox(
                placeholder="Type your message...",
                lines=1,
                show_label=False,
                scale=8
            )
            send = gr.Button("Send", scale=1)

    def respond(message, state):
        # Rebuild (user, assistant) pairs from the flat state list for chat_fn's history
        state_pairs = [(state[i][1], state[i + 1][1]) for i in range(0, len(state) - 1, 2)]
        response = chat_fn(message, state_pairs)
        state.append(("user", message))
        state.append(("ai", response))
        html = ""
        for role, content in state:
            html += f'<div class="message {role}">{content}</div>'
        return html, state, ""

    send.click(respond, [msg, chatbot_state], [chatbox, chatbot_state, msg])
    msg.submit(respond, [msg, chatbot_state], [chatbox, chatbot_state, msg])
# Launch both UI and API
if __name__ == "__main__":
    # prevent_thread_lock=True keeps the first launch from blocking, so the API app can start too
    chatbot_ui.queue().launch(share=True, inline=False, prevent_thread_lock=True)
    api_interface.launch(inline=False)
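# Once both apps are running, the plain API can be called programmatically, e.g. with
# gradio_client (the port and api_name below are assumptions, depending on how Gradio assigns them):
#     from gradio_client import Client
#     client = Client("http://127.0.0.1:7861/")
#     print(client.predict("What does Codex Assistant do?", api_name="/predict"))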