Update app.py
Browse files
app.py
CHANGED
@@ -15,11 +15,14 @@ with st.sidebar:
|
|
15 |
st.image('img/logo.png', use_column_width=True)
|
16 |
st.title("Nanbeige Chatbot💬")
|
17 |
st.markdown("""
|
18 |
-
<p>Created by Nanbeige Lab
|
19 |
</p>""", unsafe_allow_html=True)
|
20 |
|
21 |
temperature = st.sidebar.slider('temperature', min_value=0.01, max_value=1.0, value=0.3, step=0.01)
|
22 |
top_p = st.sidebar.slider('top_p', min_value=0.01, max_value=1.0, value=0.9, step=0.01)
|
|
|
|
|
|
|
23 |
|
24 |
st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
|
25 |
|
@@ -40,9 +43,11 @@ def generate_response():
|
|
40 |
payload = json.dumps({
|
41 |
'model': 'NBG-plus',
|
42 |
'messages': messages,
|
43 |
-
'max_tokens':
|
44 |
-
'temperature': 0.
|
45 |
-
'
|
|
|
|
|
46 |
})
|
47 |
|
48 |
headers = {
|
@@ -50,14 +55,19 @@ def generate_response():
|
|
50 |
'Content-Type': 'application/json'
|
51 |
}
|
52 |
|
53 |
-
|
54 |
-
return r.json()['reply']
|
55 |
|
56 |
|
57 |
-
def stream_response(
|
58 |
-
|
59 |
-
|
60 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
61 |
|
62 |
|
63 |
if prompt := st.chat_input():
|
@@ -68,8 +78,8 @@ if prompt := st.chat_input():
|
|
68 |
if st.session_state.messages[-1]["role"] != "assistant":
|
69 |
with st.chat_message("assistant"):
|
70 |
with st.spinner("Thinking..."):
|
71 |
-
|
72 |
-
st.write_stream(stream_response(
|
73 |
|
74 |
-
message = {"role": "assistant", "content":
|
75 |
st.session_state.messages.append(message)
|
|
|
15 |
st.image('img/logo.png', use_column_width=True)
|
16 |
st.title("Nanbeige Chatbot💬")
|
17 |
st.markdown("""
|
18 |
+
<p>Created by Nanbeige Lab
|
19 |
</p>""", unsafe_allow_html=True)
|
20 |
|
21 |
temperature = st.sidebar.slider('temperature', min_value=0.01, max_value=1.0, value=0.3, step=0.01)
|
22 |
top_p = st.sidebar.slider('top_p', min_value=0.01, max_value=1.0, value=0.9, step=0.01)
|
23 |
+
# st.text_input("System prompt: ",
|
24 |
+
# key='sys_prompt',
|
25 |
+
# placeholder="If not provided, the default system prompt will be used.")
|
26 |
|
27 |
st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
|
28 |
|
|
|
43 |
payload = json.dumps({
|
44 |
'model': 'NBG-plus',
|
45 |
'messages': messages,
|
46 |
+
'max_tokens': 4096,
|
47 |
+
'temperature': 0.7,
|
48 |
+
'stream': True,
|
49 |
+
'output_accumulate': True,
|
50 |
+
'top_p': 1,
|
51 |
})
|
52 |
|
53 |
headers = {
|
|
|
55 |
'Content-Type': 'application/json'
|
56 |
}
|
57 |
|
58 |
+
return requests.request("POST", st.secrets["secret_url"], headers=headers, data=payload, stream=True)
|
|
|
59 |
|
60 |
|
61 |
def stream_response(resp):
    """Yield incremental text chunks from a streaming chat-completion response.

    Each non-empty line of the response is expected to be an SSE-style event
    of the form ``data: <json>`` (the first 6 characters are stripped before
    parsing). The server sends the *accumulated* assistant text at
    ``modelServerData.choices[0].delta.content`` on every event, so only the
    suffix that has not been yielded yet is emitted each time — suitable for
    feeding directly into ``st.write_stream``.

    NOTE(review): assumes every data line carries the full accumulated
    content (``output_accumulate``-style streaming) — confirm against the
    backend contract.
    """
    emitted = 0  # length of content already yielded to the caller
    for raw in resp.iter_lines():
        decoded = raw.decode()
        if not decoded:
            # keep-alive / blank separator lines between SSE events
            continue
        event = json.loads(decoded[6:])  # drop the leading "data: " prefix
        full_text = event['modelServerData']['choices'][0]['delta']['content']
        yield full_text[emitted:]
        emitted = len(full_text)
|
71 |
|
72 |
|
73 |
if prompt := st.chat_input():
|
|
|
78 |
if st.session_state.messages[-1]["role"] != "assistant":
|
79 |
with st.chat_message("assistant"):
|
80 |
with st.spinner("Thinking..."):
|
81 |
+
stream_resp = generate_response()
|
82 |
+
resp = st.write_stream(stream_response(stream_resp))
|
83 |
|
84 |
+
message = {"role": "assistant", "content": resp}
|
85 |
st.session_state.messages.append(message)
|