yy0511 committed on
Commit
558803b
·
1 Parent(s): 2382a5e

Add application file

Browse files
Files changed (3) hide show
  1. app.py +42 -0
  2. requirements.txt +2 -0
  3. utils.py +14 -0
app.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
from utils import get_chat_response
from langchain.memory import ConversationBufferMemory

# Simple Streamlit chat UI backed by a LangChain conversation memory.
st.title("聊天小助手")

# Sidebar: collect the user's API key (masked) and link to where to get one.
with st.sidebar:
    openai_api_key = st.text_input("请输入OpenAI API密钥:", type="password")
    st.markdown("[获取 OpenAI API 密钥](https://platform.openai.com/docs/examples)")

# First run: initialise conversation memory and seed the greeting message.
if "memory" not in st.session_state:
    st.session_state["memory"] = ConversationBufferMemory(return_messages=True)
    st.session_state["messages"] = [
        {"role": "ai", "content": "你好,我是你的AI助手,有什么可以帮你的吗?"}
    ]

# Replay the full transcript on every rerun (Streamlit re-executes the script).
for message in st.session_state["messages"]:
    st.chat_message(message["role"]).write(message["content"])

prompt = st.chat_input()

if prompt:
    if not openai_api_key:
        st.info("请输入你的OpenAI API Key")
        # BUG FIX: original read `st.stop` (attribute access, no call), which
        # did nothing — execution fell through and called the API with an
        # empty key. It must be invoked to halt the script.
        st.stop()
    st.session_state["messages"].append({"role": "human", "content": prompt})
    st.chat_message("human").write(prompt)

    with st.spinner("AI正在思考,请稍等..."):
        response = get_chat_response(prompt, st.session_state["memory"], openai_api_key)

    msg = {"role": "ai", "content": response}
    st.session_state["messages"].append(msg)
    st.chat_message("ai").write(response)

# "Start a new conversation": reset memory and transcript, then rerun.
submit = st.button("开启新一轮对话")
if submit:
    st.session_state["memory"] = ConversationBufferMemory(return_messages=True)
    st.session_state["messages"] = [
        {"role": "ai", "content": "你好,我是你的AI助手,有什么可以帮你的吗?"}
    ]
    # FIX: st.experimental_rerun() was deprecated and removed in modern
    # Streamlit; st.rerun() is the supported replacement. requirements.txt
    # pins no Streamlit version, so a fresh install gets a release where
    # the experimental name no longer exists.
    st.rerun()
+
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
streamlit
langchain
langchain_openai
utils.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from langchain_openai import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory


def get_chat_response(prompt, memory, openai_api_key):
    """Run one conversational turn through gpt-3.5-turbo and return the reply.

    Args:
        prompt: The user's message text for this turn.
        memory: A ConversationBufferMemory carrying the prior turns; it is
            updated in place by the chain with this exchange.
        openai_api_key: API key supplied by the caller (never hard-code it).

    Returns:
        The assistant's reply as a string.
    """
    # NOTE(review): base_url points at a third-party OpenAI-compatible proxy,
    # not api.openai.com — confirm this endpoint is trusted before routing
    # user prompts (and the API key) through it.
    model = ChatOpenAI(
        model="gpt-3.5-turbo",
        openai_api_key=openai_api_key,
        base_url="https://api.aigc369.com/v1",
    )
    chain = ConversationChain(llm=model, memory=memory)
    response = chain.invoke({"input": prompt})
    # ConversationChain.invoke returns a dict; the reply lives under "response".
    return response["response"]

# SECURITY FIX: removed commented-out demo calls that embedded a plaintext
# API key ("sk-..."). Never commit secrets — the leaked key should be revoked.