Commit
•
90e7d3d
1
Parent(s):
a167a91
Rename app..py to app.py
Browse files- app..py → app.py +51 -51
app..py → app.py
RENAMED
@@ -1,51 +1,51 @@
|
|
1 |
-
import os
|
2 |
-
import streamlit as st
|
3 |
-
from langchain_core.messages import HumanMessage
|
4 |
-
from langchain_google_genai import ChatGoogleGenerativeAI
|
5 |
-
|
6 |
-
# Set the Google API Key environment variable
|
7 |
-
os.environ['GOOGLE_API_KEY'] = '
|
8 |
-
|
9 |
-
def generate_chat_message(prompt):
|
10 |
-
model = ChatGoogleGenerativeAI(model="gemini-pro")
|
11 |
-
message = HumanMessage(content=prompt)
|
12 |
-
response = model.stream([message])
|
13 |
-
|
14 |
-
response_text = ""
|
15 |
-
for chunk in response:
|
16 |
-
response_text += chunk.content
|
17 |
-
return response_text
|
18 |
-
|
19 |
-
def submit_input():
|
20 |
-
st.session_state.chat_history = []
|
21 |
-
user_input = st.session_state.user_input
|
22 |
-
if user_input:
|
23 |
-
# Append user message to chat history
|
24 |
-
st.session_state.chat_history.append(("User", user_input))
|
25 |
-
|
26 |
-
# Generate response from the model
|
27 |
-
response = generate_chat_message(user_input)
|
28 |
-
|
29 |
-
# Append model response to chat history
|
30 |
-
st.session_state.chat_history.append(("AI", response))
|
31 |
-
|
32 |
-
# Clear the input box by setting session state
|
33 |
-
st.session_state.user_input = ""
|
34 |
-
|
35 |
-
# Streamlit app layout
|
36 |
-
st.title("Chat with LLM")
|
37 |
-
|
38 |
-
if "chat_history" not in st.session_state:
|
39 |
-
st.session_state.chat_history = []
|
40 |
-
|
41 |
-
# Input text box for the user
|
42 |
-
st.text_input("You:", key="user_input", on_change=submit_input)
|
43 |
-
|
44 |
-
# Display the chat history
|
45 |
-
for sender, message in st.session_state.chat_history:
|
46 |
-
if sender == "User":
|
47 |
-
st.write(f"**You:** {message}")
|
48 |
-
else:
|
49 |
-
st.write(f"**AI:** {message}")
|
50 |
-
|
51 |
-
print(st.session_state)
|
|
|
1 |
+
import os
|
2 |
+
import streamlit as st
|
3 |
+
from langchain_core.messages import HumanMessage
|
4 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
5 |
+
|
6 |
+
# Security: never hard-code API keys in source files — anyone with repo
# access (or the git history, as this commit shows) can read them.
# Read the key from the host environment instead; fall back to an empty
# string so the app still loads and the model call surfaces a clear auth
# error rather than leaking a secret here.
os.environ['GOOGLE_API_KEY'] = os.environ.get('GOOGLE_API_KEY', '')
|
8 |
+
|
9 |
+
def generate_chat_message(prompt):
    """Send *prompt* to the Gemini model and return the complete reply text.

    The response is consumed as a stream of chunks; the chunk contents are
    joined into one string before returning.

    Args:
        prompt: The user's message text.

    Returns:
        The model's full reply as a single string.
    """
    model = ChatGoogleGenerativeAI(model="gemini-pro")
    message = HumanMessage(content=prompt)
    # "".join avoids the quadratic cost of `+=` string accumulation while
    # still consuming the stream chunk by chunk.
    return "".join(chunk.content for chunk in model.stream([message]))
|
18 |
+
|
19 |
+
def submit_input():
    """Streamlit ``on_change`` callback: process one chat turn.

    Appends the user's message and the model's reply to the session chat
    history, then clears the input box.
    """
    # Bug fix: the original reset chat_history to [] on every submission,
    # so the history display only ever showed the latest exchange.
    # Initialize it only when missing so the conversation accumulates.
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = []
    user_input = st.session_state.user_input
    if user_input:
        # Record the user's message.
        st.session_state.chat_history.append(("User", user_input))
        # Ask the model and record its reply.
        response = generate_chat_message(user_input)
        st.session_state.chat_history.append(("AI", response))
        # Clearing the widget's bound key empties the text box for the
        # next message.
        st.session_state.user_input = ""
|
34 |
+
|
35 |
+
# --- Streamlit app layout ---
st.title("Chat with LLM")

# Ensure the history list exists before any widget below reads it.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

# Input text box for the user; submit_input runs whenever the value changes.
st.text_input("You:", key="user_input", on_change=submit_input)

# Render the conversation so far, labelling each side.
for sender, message in st.session_state.chat_history:
    if sender == "User":
        st.write(f"**You:** {message}")
    else:
        st.write(f"**AI:** {message}")

# Removed leftover debug output (`print(st.session_state)`), which dumped
# the whole session state to the server console on every rerun.
|