import os

import streamlit as st
from langchain_core.messages import HumanMessage
from langchain_google_genai import ChatGoogleGenerativeAI

# Set the Google API Key environment variable (replace the placeholder with your key)
os.environ['GOOGLE_API_KEY'] = '********************************'


def generate_chat_message(prompt):
    """Stream a response from the Gemini model and return the full text."""
    model = ChatGoogleGenerativeAI(model="gemini-pro")
    message = HumanMessage(content=prompt)
    response = model.stream([message])
    response_text = ""
    for chunk in response:
        response_text += chunk.content
    return response_text


def submit_input():
    user_input = st.session_state.user_input
    if user_input:
        # Append user message to chat history
        st.session_state.chat_history.append(("User", user_input))
        # Generate response from the model
        response = generate_chat_message(user_input)
        # Append model response to chat history
        st.session_state.chat_history.append(("AI", response))
        # Clear the input box by resetting its session state value
        st.session_state.user_input = ""


# Streamlit app layout
st.title("Chat with LLM")

# Initialize chat history once per session so it persists across turns
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Input text box for the user; submit_input runs when the value changes
st.text_input("You:", key="user_input", on_change=submit_input)

# Display the chat history
for sender, message in st.session_state.chat_history:
    if sender == "User":
        st.write(f"**You:** {message}")
    else:
        st.write(f"**AI:** {message}")

# Debug: print the session state to the server console
print(st.session_state)