import streamlit as st
import google.generativeai as genai
import os
from dotenv import load_dotenv

# Load API key from .env file
load_dotenv()
genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

# Initialize chat model
model = genai.GenerativeModel("gemini-1.5-flash")

# Streamlit UI
st.title("🤖 AI Chatbot (Gemini 1.5 Flash)")

# Add description
st.markdown("""
### About this Chatbot
This is an AI-powered chatbot built using:
* **Gemini 1.5 Flash** - Google's fast, lightweight language model
* **Streamlit** - For the interactive web interface
* **Python** - For backend implementation

The chatbot can help you with:
- General questions and conversations
- Writing and analysis tasks
- Problem-solving and explanations
""")

st.write("Ask me anything!")

# Store chat history across Streamlit reruns
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display previous messages
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# Get user input
user_input = st.chat_input("Type your message...")

if user_input:
    # Display user message
    st.chat_message("user").markdown(user_input)

    # Prepare chat history for context (Gemini expects roles "user" and "model")
    chat_history = [
        {"role": "user" if m["role"] == "user" else "model", "parts": [m["content"]]}
        for m in st.session_state.messages
    ]

    # Call the Gemini API with prior history plus the new message,
    # so replies stay grounded in the conversation so far
    response = model.generate_content(
        contents=chat_history + [{"role": "user", "parts": [user_input]}],
        generation_config={"temperature": 0.7},
    )
    bot_reply = response.text

    # Display bot response
    st.chat_message("assistant").markdown(bot_reply)

    # Save conversation
    st.session_state.messages.append({"role": "user", "content": user_input})
    st.session_state.messages.append({"role": "assistant", "content": bot_reply})

    # Keep only the last 3 exchanges (6 messages total)
    if len(st.session_state.messages) > 6:
        st.session_state.messages = st.session_state.messages[-6:]
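
# Minimal setup sketch (assumptions: the script is saved as app.py and the key
# is stored in a .env file alongside it; neither name appears in the script
# itself). The .env file read by load_dotenv() would contain a single line:
#
#   GEMINI_API_KEY=your_api_key_here
#
# and the app would be launched with Streamlit's CLI:
#
#   streamlit run app.py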