import gradio as gr
import requests
import os
import faiss
import numpy as np
import json
from sentence_transformers import SentenceTransformer

# ✅ Load RAG-related files
with open("texts.json", "r", encoding="utf-8") as f:
    texts = json.load(f)

index = faiss.read_index("faiss_index.bin")
embed_model = SentenceTransformer("all-MiniLM-L6-v2")
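# NOTE: texts.json (a list of text chunks) and faiss_index.bin are assumed to be
# built offline with the same embedding model, e.g. (sketch):
#   vecs = embed_model.encode(texts)
#   index = faiss.IndexFlatL2(vecs.shape[1])
#   index.add(np.array(vecs, dtype="float32"))
#   faiss.write_index(index, "faiss_index.bin")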

# ✅ Use your OpenRouter API key
API_KEY = os.environ.get("OPENROUTER_API_KEY")
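# If the variable is unset, API_KEY is None and the request below will fail with an auth error.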
MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"

# ✅ Function to search relevant context
def get_context(query, top_k=5):
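    # Embed the query with the same model used to build the index and return
    # the top_k nearest chunks joined into a single context string.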
    query_vec = embed_model.encode([query])
    D, I = index.search(np.array(query_vec), top_k)
    return "\n".join([texts[i] for i in I[0]])

# ✅ Chat handler function
def chat_fn(message, history):
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }

    context = get_context(message)

    messages = [
        {"role": "system", "content": "You are a helpful assistant. Use the following context to answer: " + context}
    ]

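    # Replay earlier turns; with the default (tuple) history format,
    # gr.ChatInterface passes history as (user_message, assistant_message) pairs.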
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})

    messages.append({"role": "user", "content": message})

    payload = {
        "model": MODEL,
        "messages": messages
    }

    try:
        response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload)
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        reply = f"❌ Error: {e}"

    return reply

# ✅ Launch Gradio ChatInterface
gr.ChatInterface(
    fn=chat_fn,
    title="CODEX MIRXA KAMRAN",
    description="Chat with an AI model trained by Mirxa Kamran",
    theme="soft"
).launch()