import os

import streamlit as st
from langchain_core.messages import HumanMessage
from langchain_google_genai import ChatGoogleGenerativeAI

# Google API key used by langchain-google-genai (redacted).
os.environ['GOOGLE_API_KEY'] = '********************************'
|
|
def generate_chat_message(prompt):
    """Send one prompt to Gemini and return the complete response text."""
    model = ChatGoogleGenerativeAI(model="gemini-pro")
    message = HumanMessage(content=prompt)
    response = model.stream([message])

    # Collect the streamed chunks into a single string.
    response_text = ""
    for chunk in response:
        response_text += chunk.content
    return response_text
|
|
def submit_input():
    user_input = st.session_state.user_input
    if user_input:
        # Record the user's message in the running conversation history.
        st.session_state.chat_history.append(("User", user_input))

        # Query the model and record its reply.
        response = generate_chat_message(user_input)
        st.session_state.chat_history.append(("AI", response))

        # Clear the text box so it is empty on the next rerun.
        st.session_state.user_input = ""
|
|
st.title("Chat with LLM")

# Initialise the conversation history on the first run.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Text input; submit_input runs whenever the value changes (user presses Enter).
st.text_input("You:", key="user_input", on_change=submit_input)

# Render the conversation so far.
for sender, message in st.session_state.chat_history:
    if sender == "User":
        st.write(f"**You:** {message}")
    else:
        st.write(f"**AI:** {message}")

# Debug aid: dump the session state to the terminal running Streamlit.
print(st.session_state)
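# To try the app locally (assuming the script is saved as chat_app.py; the
# filename is illustrative):
#   streamlit run chat_app.py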
|
|