import os

import streamlit as st
from streamlit_chat import message

from langchain.chains import ConversationalRetrievalChain
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import FAISS

st.title("Welcome to BhubBot")
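
# Keep the chat history in Streamlit's session state so it survives reruns.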
if 'responses' not in st.session_state:
    st.session_state['responses'] = ["How can I assist you?"]

if 'requests' not in st.session_state:
    st.session_state['requests'] = []
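
# The OpenAI API key comes from the environment; avoid hard-coding secrets in source.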
openai_api_key = os.getenv("OPENAI_API_KEY")
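
# Embedding model, the locally saved FAISS index, and a deterministic (temperature 0) completion model.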
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
new_db = FAISS.load_local("faiss_leave_policy_RCV", embeddings)
llm = OpenAI(openai_api_key=openai_api_key, temperature=0.0)
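
# Conversational retrieval ("stuff" chain) over the FAISS index, with chat history kept in buffer memory.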
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
retriever = new_db.as_retriever()
chain = ConversationalRetrievalChain.from_llm(
    llm=llm,
    retriever=retriever,
    chain_type="stuff",
    memory=memory,
    verbose=False,
)
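
# Two containers: responses render above, the input box sits below.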
response_container = st.container()
textcontainer = st.container()
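
# Read the user's prompt, run it through the chain, and store both sides of the exchange.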
with textcontainer:
    query = st.text_input(label="Please Enter Your Prompt Here: ", placeholder="Ask me")
    if query:
        with st.spinner("Cooking..."):
            response = chain.run(query)
            st.session_state.requests.append(query)
            st.session_state.responses.append(response)
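
# Render the conversation, alternating bot responses and the user requests that follow them.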
with response_container:
    if st.session_state['responses']:
        for i in range(len(st.session_state['responses'])):
            message(st.session_state['responses'][i], key=str(i))
            if i < len(st.session_state['requests']):
                message(st.session_state["requests"][i], is_user=True, key=str(i) + '_user')
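
# Assuming this file is saved as app.py, launch the app with: streamlit run app.py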