|
import gradio as gr |
|
import os |
|
|
|
from langchain_huggingface import HuggingFaceEmbeddings |
|
|
|
from langchain_core.runnables import RunnableParallel |
|
from langchain_core.runnables import RunnablePassthrough |
|
from langchain_core.output_parsers import StrOutputParser |
|
from langchain_core.prompts import PromptTemplate |
|
from langchain_chroma import Chroma |
|
|
|
from langchain_community.llms import GPT4All |
|
from huggingface_hub import hf_hub_download |
|
|
|
|
|
# Directory and filename for the local copy of the quantized GGUF model.
model_path = "models"

model_name = "bling-phi-3.gguf"

# Download the BLING Phi-3 GGUF weights from the Hugging Face Hub into
# ./models (hf_hub_download is a no-op if the file is already present).
hf_hub_download(repo_id="llmware/bling-phi-3-gguf", filename=model_name, local_dir=model_path)

# Build the model path from the same variables used for the download, rather
# than repeating it as a hard-coded literal — keeps the two in sync if the
# directory or filename ever changes.
llm = GPT4All(model=os.path.join(model_path, model_name))
|
|
|
|
|
|
|
# Sentence-transformer model used to embed incoming queries. This must be the
# same embedding model that was used when the Chroma store was built, or
# retrieval similarity scores will be meaningless.
embedding_model = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")

# Open the persisted Chroma vector store (read side of the RAG pipeline).
# NOTE(review): assumes ./chroma_db was already populated by a separate
# ingestion step — this script never adds documents to it; confirm.
vector_store = Chroma(persist_directory="./chroma_db", embedding_function=embedding_model)
|
|
|
|
|
# Prompt in the <human>/<bot> chat format used by the BLING model family:
# retrieved context first, then the user's question.
template = """<human>: {context} \n {question} \n<bot>:"""

prompt = PromptTemplate.from_template(template)

# LCEL pipeline, evaluated left to right:
#   1. RunnableParallel fans the input query out to two keys: "context" is the
#      list of documents returned by the retriever, while "question" is the
#      raw query passed through unchanged.
#   2. The second dict (implicitly a parallel map) produces the final output:
#      "answer" formats the prompt, runs the LLM, and parses it to a string;
#      "sources" pulls the "source" metadata field off each retrieved
#      document, falling back to "Unknown" when the field is absent.
qa_chain_with_sources = (
    RunnableParallel(
        {
            "context": vector_store.as_retriever(),
            "question": RunnablePassthrough(),
        }
    )
    | {
        "answer": prompt | llm | StrOutputParser(),
        "sources": lambda x: [doc.metadata.get("source", "Unknown") for doc in x["context"]],
    }
)
|
|
|
|
|
def rag_query(query, history):
    """Answer one chat message via the RAG chain and append its sources.

    Args:
        query: The user's question, as plain text from the chat box.
        history: Prior chat messages supplied by gr.ChatInterface; unused
            here, but required by the ChatInterface ``fn`` signature.

    Returns:
        A single formatted string: the model's answer followed by a
        de-duplicated list of source identifiers, one per line.
    """
    response = qa_chain_with_sources.invoke(query)

    answer = response["answer"]
    # dict.fromkeys de-duplicates while preserving first-seen order, unlike
    # set(), whose iteration order is non-deterministic across runs — so the
    # same query now always lists its sources in the same order.
    unique_sources = list(dict.fromkeys(response["sources"]))

    return f"Answer: {answer}\n\nSources:\n" + "\n".join(unique_sources)
|
|
|
|
|
""" |
|
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface |
|
""" |
|
|
|
# Chat UI wrapping rag_query. type='messages' makes Gradio deliver history as
# a list of {"role": ..., "content": ...} dicts (the modern openai-style
# ChatInterface format).
demo = gr.ChatInterface(
    fn=rag_query,
    title="WEHI Student Intern Chatbot Demo",
    type='messages',
    description="Ask questions related to your WEHI internship and get answers with sources.",
    # Clickable example questions shown beneath the chat box.
    examples=[
        "What flexibility is there for the internship?",
        "What are the key things to do before the weekly meetings?",
        "How do I tackle complex and ambiguous projects?",
        "What happens over Easter break at WEHI?",
        "What are the tasks for the REDMANE Data Ingestion team?",
        "When is the final presentation due?",
        "What is Nectar?",
        "Is the internship remote or in person?"
    ],
)

# Start the Gradio server. This call blocks; by default it serves on
# http://127.0.0.1:7860.
demo.launch()
|
|
|
|