# app.py
import os

from qdrant_client import QdrantClient

from llama_index.core import VectorStoreIndex, StorageContext, ServiceContext, Document
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.together import TogetherLLM
from llama_index.vector_stores.qdrant import QdrantVectorStore
# === Qdrant Config === | |
QDRANT_API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhY2Nlc3MiOiJtIn0.9Pj8v4ACpX3m5U3SZUrG_jzrjGF-T41J5icZ6EPMxnc" | |
QDRANT_URL = "https://d36718f0-be68-4040-b276-f1f39bc1aeb9.us-east4-0.gcp.cloud.qdrant.io" | |
COLLECTION_NAME = "demo-chatbot" | |
# === Embedding & LLM Setup === | |
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-base-en-v1.5") | |
llm = TogetherLLM( | |
model="meta-llama/Llama-3-8b-chat-hf", | |
api_key="a36246d65d8290f43667350b364c5b6bb8562eb50a4b947eec5bd7e79f2dffc6", | |
temperature=0.3, | |
max_tokens=1024, | |
top_p=0.7 | |
) | |
Settings.llm = llm | |
Settings.embed_model = embed_model | |
# === Qdrant Integration === | |
qdrant_client = QdrantClient(url=QDRANT_URL, api_key=QDRANT_API_KEY) | |
vector_store = QdrantVectorStore( | |
client=qdrant_client, | |
collection_name=COLLECTION_NAME | |
) | |
# === Build Index === | |
index = VectorStoreIndex.from_vector_store(vector_store) | |
query_engine = index.as_query_engine(similarity_top_k=5) | |
# === Enhanced RAG Chain with References === | |
def rag_chain(query: str, include_sources: bool = True) -> str: | |
response = query_engine.query(query) | |
response_text = str(response) | |
if include_sources: | |
references = get_clickable_references_from_response(response) | |
if references: | |
response_text += "\n\n🔗 **Sources:**\n" + "\n".join(references) | |
return response_text | |
# === Clickable Reference Links (top-2 from response nodes) === | |
def get_clickable_references_from_response(response, max_refs: int = 2): | |
seen = set() | |
links = [] | |
for node in response.source_nodes: | |
metadata = node.node.metadata | |
section = metadata.get("section", "Unknown") | |
source = metadata.get("source", "Unknown") | |
key = (section, source) | |
if key not in seen: | |
seen.add(key) | |
if source.startswith("http"): | |
links.append(f"- [{section}]({source})") | |
else: | |
links.append(f"- {section}: {source}") | |
if len(links) >= max_refs: | |
break | |
return links | |
from datetime import datetime | |
import time | |
import gradio as gr | |
# Chat handler | |
def chat_interface(message, history): | |
history = history or [] | |
message = message.strip() | |
if not message: | |
raise ValueError("Please enter a valid question.") | |
timestamp_user = datetime.now().strftime("%H:%M:%S") | |
user_msg = f"🧑 **You**\n{message}\n\n⏱️ {timestamp_user}" | |
bot_msg = "⏳ _Bot is typing..._" | |
history.append((user_msg, bot_msg)) | |
try: | |
time.sleep(0.5) | |
answer = rag_chain(message) # already includes references | |
full_response = answer.strip() | |
timestamp_bot = datetime.now().strftime("%H:%M:%S") | |
bot_msg = f"🤖 **Bot**\n{full_response}\n\n⏱️ {timestamp_bot}" | |
history[-1] = (user_msg, bot_msg) | |
except Exception as e: | |
timestamp_bot = datetime.now().strftime("%H:%M:%S") | |
error_msg = f"🤖 **Bot**\n⚠️ {str(e)}\n\n⏱️ {timestamp_bot}" | |
history[-1] = (user_msg, error_msg) | |
return history, history, "" | |
# Gradio UI | |
def launch_gradio(): | |
with gr.Blocks(css=""" | |
.gr-button { | |
background-color: orange !important; | |
color: white !important; | |
font-weight: bold; | |
border-radius: 6px !important; | |
border: 1px solid darkorange !important; | |
} | |
.gr-button:hover { | |
background-color: darkorange !important; | |
} | |
.gr-textbox textarea { | |
border: 2px solid orange !important; | |
border-radius: 6px !important; | |
padding: 0.75rem !important; | |
font-size: 1rem; | |
} | |
""") as demo: | |
gr.Markdown("# 💬 ImageOnline RAG Chatbot") | |
gr.Markdown("Welcome! Ask about Website Designing, Web Development, App Development, About Us, Digital Marketing etc.") | |
chatbot = gr.Chatbot() | |
state = gr.State([]) | |
with gr.Row(equal_height=True): | |
msg = gr.Textbox( | |
placeholder="Ask your question here...", | |
show_label=False, | |
scale=9 | |
) | |
send_btn = gr.Button("🚀 Send", scale=1) | |
msg.submit(chat_interface, inputs=[msg, state], outputs=[chatbot, state, msg]) | |
send_btn.click(chat_interface, inputs=[msg, state], outputs=[chatbot, state, msg]) | |
with gr.Row(): | |
clear_btn = gr.Button("🧹 Clear Chat") | |
clear_btn.click(fn=lambda: ([], []), outputs=[chatbot, state]) | |
return demo | |
# Launch | |
demo = launch_gradio() | |
demo.launch() | |