import streamlit as st
from streamlit_chat import message
from langchain_openai import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import (
    ConversationBufferMemory,
    ConversationSummaryMemory,
    ConversationBufferWindowMemory,
)

# Initialise session state
if 'conversation' not in st.session_state:
    st.session_state['conversation'] = None
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
if 'API_Key' not in st.session_state:
    st.session_state['API_Key'] = ''

# Setting page title and header
st.set_page_config(page_title="ChatMate: Your Professional AI Conversation Partner", page_icon=":robot_face:")
st.markdown("<h1 style='text-align: center;'>ChatMate</h1>", unsafe_allow_html=True)
st.markdown("<h3 style='text-align: center;'>OpenAI-like Chat Experience</h3>", unsafe_allow_html=True)
st.markdown("<p style='text-align: center;'>By Lorentz Yeung</p>", unsafe_allow_html=True)
st.markdown(
    "<p style='text-align: center;'>I am capable of recalling previous parts of our conversation, "
    "such as remembering your name if you share it with me.</p>",
    unsafe_allow_html=True,
)

# API key input: Streamlit reruns the script on every change, so this assignment
# keeps st.session_state['API_Key'] in sync with whatever the user has typed.
st.session_state['API_Key'] = st.text_input(
    "First, enter your OpenAI API key here; the app will use it automatically.",
    type="password",
)
st.markdown("<h4 style='text-align: center;'>How can I help you today?</h4>", unsafe_allow_html=True)

# Sidebar introduction
st.sidebar.title("Introduction")
st.sidebar.markdown("""
ChatMate is an advanced conversational AI interface, expertly crafted to demonstrate the fusion of
Streamlit's user-friendly design and OpenAI's powerful GPT-3.5 model. Here are its highlights:
""", unsafe_allow_html=True)
st.markdown("<p style='text-align: center;'>By Lorentz Yeung</p>", unsafe_allow_html=True)


# Get a response from the model, creating the conversation chain on first use
def getresponse(userInput, api_key):
    if st.session_state['conversation'] is None:
        llm = ChatOpenAI(
            temperature=0,
            openai_api_key=api_key,
            model_name='gpt-3.5-turbo',
        )
        st.session_state['conversation'] = ConversationChain(
            llm=llm,
            verbose=True,
            memory=ConversationSummaryMemory(llm=llm),
        )

    response = st.session_state['conversation'].predict(input=userInput)
    print(st.session_state['conversation'].memory.buffer)  # log the running summary for debugging
    return response


response_container = st.container()  # container for the chat history
container = st.container()           # container for the user input text box

# User input and response display
with container:
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_area("Ask me questions please", key='input', height=100)
        submit_button = st.form_submit_button(label='Send')

        if submit_button:
            st.session_state['messages'].append(user_input)
            model_response = getresponse(user_input, st.session_state['API_Key'])
            st.session_state['messages'].append(model_response)

            with response_container:
                # Even indices are user turns, odd indices are AI turns
                for i in range(len(st.session_state['messages'])):
                    if i % 2 == 0:
                        message(st.session_state['messages'][i], is_user=True, key=str(i) + '_user')
                    else:
                        message(st.session_state['messages'][i], key=str(i) + '_AI')