Upload 2 files
- app.py +26 -20
- requirements.txt +4 -2
app.py
CHANGED
@@ -1,39 +1,45 @@
-#Hello! It seems like you want to import the Streamlit library in Python. Streamlit is a powerful open-source framework used for building web applications with interactive data visualizations and machine learning models. To import Streamlit, you'll need to ensure that you have it installed in your Python environment.
-#Once you have Streamlit installed, you can import it into your Python script using the import statement,
-
 import streamlit as st
+from langchain.chat_models import ChatOpenAI
+from langchain.schema import AIMessage, HumanMessage, SystemMessage
+
+# From here down is all the StreamLit UI.
+st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
+st.header("Hey, I'm your Chat GPT")
 
 
-
+if "sessionMessages" not in st.session_state:
+    st.session_state.sessionMessages = [
+        SystemMessage(content="You are a helpful assistant.")
+    ]
 
 
-#Function to return the response
 def load_answer(question):
-    llm = HuggingFaceHub(repo_id="google/flan-t5-large")
-    answer=llm(question)
-    return answer
 
+    st.session_state.sessionMessages.append(HumanMessage(content=question))
+
+    assistant_answer = chat(st.session_state.sessionMessages)
+
+    st.session_state.sessionMessages.append(
+        AIMessage(content=assistant_answer.content)
+    )
+
+    return assistant_answer.content
 
-#App UI starts here
-st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
-st.header("LangChain Demo")
 
-#Gets the user input
 def get_text():
-    input_text = st.text_input("You: ", key=
+    input_text = st.text_input("You: ", key=input)
     return input_text
 
 
+chat = ChatOpenAI(temperature=0)
+
+
 user_input = get_text()
-submit = st.button(
+submit = st.button("Generate")
 
-#If generate button is clicked
 if submit:
 
-    st.subheader("Answer:")
-
-
     response = load_answer(user_input)
+    st.subheader("Answer:")
 
-    st.write(response)
-
+    st.write(response, key=1)
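The new app.py constructs ChatOpenAI directly, so it relies on an OpenAI API key already being present in the environment; requirements.txt adds openai and python-dotenv, but nothing in this commit loads a .env file. A minimal sketch of how the key could be wired in before the ChatOpenAI call, assuming the standard OPENAI_API_KEY variable and a local .env file (illustrative only, not part of the commit):

# Hypothetical key loading, not included in this commit.
import os

from dotenv import load_dotenv
from langchain.chat_models import ChatOpenAI

load_dotenv()  # reads OPENAI_API_KEY from a local .env file, if one exists

if os.getenv("OPENAI_API_KEY") is None:
    raise RuntimeError("OPENAI_API_KEY is not set; add it to .env or the environment")

chat = ChatOpenAI(temperature=0)  # same constructor call the new app.py uses
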
requirements.txt
CHANGED
@@ -1,3 +1,5 @@
+streamlit
 langchain
-
-
+openai
+streamlit-chat
+python-dotenv
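
requirements.txt also adds streamlit-chat, which the new app.py does not import yet. If the stored sessionMessages history were to be rendered as chat bubbles, a sketch along these lines would use that package's message component (illustrative only; it assumes the session-state layout defined in app.py and the streamlit_chat.message helper):

# Hypothetical history rendering with streamlit-chat, not part of this commit.
import streamlit as st
from langchain.schema import AIMessage, HumanMessage
from streamlit_chat import message

for i, msg in enumerate(st.session_state.get("sessionMessages", [])):
    if isinstance(msg, HumanMessage):
        message(msg.content, is_user=True, key=f"user_{i}")   # user bubble
    elif isinstance(msg, AIMessage):
        message(msg.content, is_user=False, key=f"ai_{i}")    # assistant bubble

Locally, the app can be reproduced with pip install -r requirements.txt followed by streamlit run app.py.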