Du Mingzhe committed
Commit 143727a • 1 Parent(s): 450f5f1 • update

Files changed:
- app.py (+11, -3)
- components.py (+2, -1)
app.py
CHANGED
@@ -1,8 +1,6 @@
-from pinecone import Pinecone
-
-from openai import OpenAI
 import streamlit as st
 import uuid
+import json
 
 from components import PersonalIndexClient, LLMClient
 
@@ -16,10 +14,12 @@ if "messages" not in st.session_state:
     st.session_state.messages = []
     st.session_state['sid'] = uuid.uuid1().hex
 
+# Display History
 for message in st.session_state.messages:
     with st.chat_message(message["role"]):
         st.markdown(message["content"])
 
+# New Click
 if prompt := st.chat_input("What's up?"):
     st.session_state.messages.append({"role": "user", "content": prompt})
     with st.chat_message("user"):
@@ -30,4 +30,12 @@ if prompt := st.chat_input("What's up?"):
         response = st.write_stream(stream)
     st.session_state.messages.append({"role": "assistant", "content": response})
 
+    current_history = json.dumps(st.session_state.messages)
+
+    pinecone_client.create(data=[{
+        'id': '123',
+        'content': current_history,
+        'metadata': {'hello': 'world', 'content': current_history},
+    }])
+
 
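In the new version, app.py talks to Pinecone only through the PersonalIndexClient wrapper from components.py (instantiated as pinecone_client), rather than importing the Pinecone SDK directly. The wrapper's internals are not part of this commit; the following is a minimal sketch of a create(data=[...]) method matching the call site above, assuming it embeds each item's content with an OpenAI embedding model and upserts the vector into a Pinecone index. The class internals, index name, embedding model, and environment-variable handling are assumptions, not taken from the repository.

import os
from openai import OpenAI
from pinecone import Pinecone

class PersonalIndexClient:
    """Hypothetical sketch of the wrapper used in app.py; names and defaults are assumptions."""

    def __init__(self, index_name: str = "personal-index"):
        self.pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
        self.index = self.pc.Index(index_name)   # assumes the index already exists
        self.openai = OpenAI()                   # reads OPENAI_API_KEY from the environment

    def embed(self, text: str) -> list[float]:
        # Embed one string with an OpenAI embedding model (model choice is an assumption).
        resp = self.openai.embeddings.create(model="text-embedding-3-small", input=text)
        return resp.data[0].embedding

    def create(self, data: list[dict]) -> None:
        # Mirror the call site in app.py: each item carries 'id', 'content', and 'metadata'.
        vectors = [
            {
                "id": item["id"],
                "values": self.embed(item["content"]),
                "metadata": item.get("metadata", {}),
            }
            for item in data
        ]
        self.index.upsert(vectors=vectors)

Under this assumption, writing the running chat history with the fixed id '123' would overwrite the same record on every turn; using the session id already stored in st.session_state['sid'] would keep one record per conversation.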
components.py
CHANGED
@@ -16,7 +16,7 @@ class LLMClient():
         current_time = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
 
         # System Prompt
-        messages += [{"role": "system", "content": f"1) You're Du Mingzhe
+        messages += [{"role": "system", "content": f"1) You're Du Mingzhe. 2) Don't claim you are created by OpenAI. 3) Current time is {current_time}."}]
 
         # Session History
         messages += [{"role": h["role"], "content": h["content"]} for h in history]
@@ -27,6 +27,7 @@ class LLMClient():
             messages = messages,
             stream=True,
         )
+
         return stream
 
 class EmbeddingModel(object):
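For context, the stream returned here is what app.py passes to st.write_stream, which can iterate an OpenAI streaming response directly. Below is a minimal sketch of an LLMClient generation method consistent with the hunks above; the constructor, method name, and model choice are assumptions, while the system prompt and history handling are taken from the diff.

from datetime import datetime
from openai import OpenAI

class LLMClient:
    """Hypothetical sketch consistent with the diff above; model and method name are assumptions."""

    def __init__(self, model: str = "gpt-4o-mini"):
        self.client = OpenAI()   # reads OPENAI_API_KEY from the environment
        self.model = model

    def generate(self, history: list[dict]):
        messages = []
        current_time = datetime.now().strftime("%d/%m/%Y %H:%M:%S")

        # System Prompt (the line added in this commit)
        messages += [{"role": "system", "content": f"1) You're Du Mingzhe. 2) Don't claim you are created by OpenAI. 3) Current time is {current_time}."}]

        # Session History
        messages += [{"role": h["role"], "content": h["content"]} for h in history]

        # Streaming chat completion; st.write_stream(stream) in app.py renders tokens as they arrive.
        stream = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            stream=True,
        )

        return stream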