import gradio as gr
import requests
import json
import os
import faiss
import numpy as np
from sentence_transformers import SentenceTransformer
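# Dependencies (inferred from the imports above): gradio, requests,
# faiss-cpu (or faiss-gpu), numpy, sentence-transformers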
# Load vector store and data
index = faiss.read_index("faiss_index.bin")
with open("texts.json", "r") as f:
    texts = json.load(f)
# Load embedding model
model = SentenceTransformer("all-MiniLM-L6-v2")
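# Note: this must be the same embedding model that was used to build
# faiss_index.bin; all-MiniLM-L6-v2 produces 384-dimensional vectors,
# so the index dimension must also be 384.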
# OpenRouter setup
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "deepseek/deepseek-chat-v3-0324:free"
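# OPENROUTER_API_KEY must be set in the environment before launch;
# otherwise the API request below fails with an authorization error.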
# Helper: Find top-k relevant context
def get_relevant_context(query, k=5):
    query_vector = model.encode([query])
    scores, indices = index.search(np.array(query_vector), k)
    # FAISS pads missing results with -1, so guard both bounds.
    return [texts[i] for i in indices[0] if 0 <= i < len(texts)]
# Chat logic
def chat_with_data(message, history):
    greetings = ["hi", "hello", "hey", "salam", "assalamualaikum", "good morning", "good evening"]
    message_lower = message.lower().strip()
    # Match greetings as whole words or exact phrases (a plain substring
    # test would fire on words like "this" containing "hi").
    if any(greet == message_lower or greet in message_lower.split() for greet in greetings):
        return "Hello! How can I assist you regarding LogiqCurve today?"
    context = get_relevant_context(message)
    if not context:
        return "Sorry, I can only answer questions related to LogiqCurve and its services."
    context_text = "\n".join(context)
    prompt = f"You are a helpful assistant for LogiqCurve. Use only the following context:\n\n{context_text}\n\nUser: {message}"
    messages = [
        {"role": "system", "content": "You are a helpful assistant that answers only using provided context."},
        {"role": "user", "content": prompt}
    ]
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    payload = {
        "model": MODEL,
        "messages": messages
    }
    try:
        res = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=60,
        )
        res.raise_for_status()
        return res.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"Error: {e}"
# Gradio UI
gr.ChatInterface(
    fn=chat_with_data,
    title="MK Assistant",
    description="Ask questions related to LogiqCurve. Chat is limited to website-related content only.",
    theme="soft",
).launch()