import os
import logging
from fastapi import APIRouter, HTTPException
from pydantic import BaseModel
from dotenv import load_dotenv
from langchain_groq import ChatGroq
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import SupabaseVectorStore
from supabase.client import create_client
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate
from langchain.chains import create_retrieval_chain
# Load environment variables from a local .env file, if present.
load_dotenv()

# Router and module-level logger for this endpoint.
# NOTE(review): logging.basicConfig at import time configures the root logger
# as a side effect — confirm this doesn't clash with the app's logging setup.
router = APIRouter()
logger = logging.getLogger("custom_chatbot")
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")

# Fail fast at import time if the Groq credentials are missing.
groq_api_key = os.getenv("GROQ_API_KEY")
if not groq_api_key:
    logger.error("❌ GROQ_API_KEY is not set in the environment.")
    raise RuntimeError("GROQ_API_KEY must be set in .env or environment variables.")
# Supabase initialization: both the project URL and API key are required.
supabase_url = os.getenv("SUPABASE_URL")
supabase_key = os.getenv("SUPABASE_KEY")
if not supabase_url or not supabase_key:
    logger.error("❌ SUPABASE_URL or SUPABASE_KEY is not set in the environment.")
    raise RuntimeError("SUPABASE_URL and SUPABASE_KEY must be set in .env or environment variables.")

try:
    supabase_client = create_client(supabase_url, supabase_key)
    logger.info("✅ Supabase client initialized successfully.")
except Exception:
    # logger.exception records the full traceback, so the exception object
    # itself is not needed (dropped the unused `as e` binding).
    logger.exception("❌ Failed to initialize Supabase client.")
    raise
# LLM initialization: a single shared ChatGroq instance for all requests.
try:
    llm = ChatGroq(groq_api_key=groq_api_key, model_name="llama-3.3-70b-versatile")
    logger.info("✅ ChatGroq LLM initialized successfully.")
except Exception:
    # logger.exception records the full traceback, so the exception object
    # itself is not needed (dropped the unused `as e` binding).
    logger.exception("❌ Failed to initialize ChatGroq LLM.")
    raise
# Prompt used on the RAG path: the model must answer strictly from the
# retrieved context.
_RAG_TEMPLATE = """
Answer the question based only on the provided context.
{context}
Question: {input}
"""

# Prompt used on the fallback path: a plain assistant answer with no
# retrieved context.
_GENERIC_TEMPLATE = """
You are a helpful AI assistant. Answer the following question:
{context}
Question: {input}
"""

prompt_template = ChatPromptTemplate.from_template(_RAG_TEMPLATE)
generic_prompt_template = ChatPromptTemplate.from_template(_GENERIC_TEMPLATE)
# Input schema
class ChatRequest(BaseModel):
    """Request body for the /custom-chatbot endpoint."""

    query: str  # natural-language question from the user
    user_id: int  # used to filter retrieval to this user's documents
# Main chatbot endpoint
@router.post("/custom-chatbot")
async def custom_chatbot(request: ChatRequest):
    """Answer a user query via RAG over the user's documents in Supabase.

    First attempts a retrieval-augmented chain restricted to the requesting
    user's documents; on any failure, falls back to a plain LLM answer with
    empty context. Returns ``{"answer": str, "sources": list[str]}``.

    Raises:
        HTTPException: 500 if even the generic fallback fails.
    """
    query = request.query
    user_id = request.user_id
    logger.info(f"🤖 Received query from user {user_id}: {query}")
    try:
        # NOTE(review): OpenAIEmbeddings requires OPENAI_API_KEY, which is not
        # validated at startup like the other credentials — confirm it is set.
        embeddings = OpenAIEmbeddings()
        vector_store = SupabaseVectorStore(
            client=supabase_client,
            embedding=embeddings,
            table_name="documents",
            query_name="match_documents",
        )
        # Restrict retrieval to documents owned by the requesting user.
        retriever = vector_store.as_retriever(search_kwargs={"filter": {"user_id": user_id}})
        doc_chain = create_stuff_documents_chain(llm, prompt_template)
        rag_chain = create_retrieval_chain(retriever, doc_chain)
        # NOTE(review): .invoke() is a blocking call inside an async route and
        # will stall the event loop under load — consider chain.ainvoke().
        response = rag_chain.invoke({"input": query})
        logger.info(f"✅ Response generated for user {user_id} using RAG.")
        return {
            "answer": response["answer"],
            "sources": [doc.page_content for doc in response.get("context", [])],
        }
    except Exception as e:
        logger.warning(f"🤷 Falling back to generic response due to: {e}")
        try:
            generic_chain = create_stuff_documents_chain(llm, generic_prompt_template)
            # Empty context: answer from the model alone.
            generic_response = generic_chain.invoke({"input": query, "context": []})
        except Exception:
            # Previously an error here (e.g. LLM outage) escaped as an
            # unhandled 500 with a raw traceback; surface a clean HTTP error
            # instead, using the already-imported HTTPException.
            logger.exception("❌ Generic fallback also failed.")
            raise HTTPException(status_code=500, detail="Chatbot service is currently unavailable.")
        return {
            "answer": generic_response,
            "sources": [],
        }