import gradio as gr
import os

from langchain import OpenAI
from langchain.agents import Tool, initialize_agent
from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain.utilities import GoogleSearchAPIWrapper
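
# Credentials are read from environment variables. A minimal sketch with
# placeholder values (assumption: you supply your own keys before running):
# os.environ["OPENAI_API_KEY"] = "sk-..."   # used by OpenAI()
# os.environ["GOOGLE_API_KEY"] = "..."      # used by GoogleSearchAPIWrapper
# os.environ["GOOGLE_CSE_ID"] = "..."       # Programmable Search Engine ID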

# Single web-search tool exposed to the agent.
search = GoogleSearchAPIWrapper()
tools = [
    Tool(
        name="Current Search",
        func=search.run,
        description="Use this always",
    ),
]

# Buffer memory shared with the agent; "chat_history" is the key the
# conversational agent prompt expects.
memory = ConversationBufferMemory(memory_key="chat_history")

llm = OpenAI(model_name="text-davinci-003", temperature=0)

# A conversational agent, so the buffer memory is actually injected into the
# prompt between turns (a zero-shot agent would ignore chat_history).
agent_chain = initialize_agent(
    tools,
    llm,
    agent="conversational-react-description",
    verbose=True,
    memory=memory,
)
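
# Optional sanity check outside the UI (assumes the API keys above are set);
# this calls the agent exactly the way the chat() handler below does:
# print(agent_chain.run(input="What did OpenAI announce this week?"))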


def chat(message, history):
    """Run one agent turn and append the exchange to the Gradio chat history."""
    history = history or []
    response = ""
    try:
        response = agent_chain.run(input=message)
    except KeyError:
        # Fall back to a fixed reply instead of surfacing the exception in the UI.
        if not response:
            response = "not found on the site"
    history.append((message, response))
    return history, history


with gr.Blocks() as demo:
    gr.Markdown("<h3><center>AITuber</center></h3>")
    chatbot = gr.Chatbot()
    with gr.Row():
        inp = gr.Textbox(placeholder="Question", label=None)
        btn = gr.Button("Run").style(full_width=False)
    state = gr.State()
    btn.click(chat, [inp, state], [chatbot, state])
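    # Optional convenience: pressing Enter in the textbox runs the same
    # handler as the Run button.
    inp.submit(chat, [inp, state], [chatbot, state])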

if __name__ == '__main__':
    demo.launch()