# Nanbeige Chatbot — Streamlit app
# (page-scrape residue removed: "ZekeWang's picture / Update app.py / b7f197f verified")
import time
import json
import requests
import streamlit as st
# Must be the first Streamlit call: sets the browser tab title.
st.set_page_config(page_title="Nanbeige Chatbot")
def clear_chat_history():
    """Reset the conversation to the initial assistant greeting."""
    greeting = {
        "role": "assistant",
        "content": "Hello, I'm Nanbeige. How may I assist you today?",
    }
    st.session_state.messages = [greeting]
# Sidebar: branding, sampling controls, and a reset button.
with st.sidebar:
    st.image('img/logo.png', use_column_width=True)
    st.title("Nanbeige Chatbot💬")
    st.markdown("""
<p>Created by Nanbeige Lab
</p>""", unsafe_allow_html=True)
    # Sampling parameters exposed to the user.
    # NOTE(review): these slider values are collected here but the request
    # payload in generate_response() hard-codes its own values — confirm
    # whether the sliders are meant to be wired through.
    temperature = st.sidebar.slider('temperature', min_value=0.01, max_value=1.0, value=0.3, step=0.01)
    top_p = st.sidebar.slider('top_p', min_value=0.01, max_value=1.0, value=0.9, step=0.01)
    # Deliberately disabled: optional custom system prompt input.
    # st.text_input("System prompt: ",
    #               key='sys_prompt',
    #               placeholder="If not provided, the default system prompt will be used.")
    st.sidebar.button('Clear Chat History', on_click=clear_chat_history)
# Seed the conversation with the assistant greeting on first load;
# st.session_state persists the message list across Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "Hello, I'm Nanbeige. How may I assist you today?"}]
# Re-render the full transcript on every rerun (Streamlit re-executes
# the whole script for each interaction).
for message in st.session_state.messages:
    with st.chat_message(message['role']):
        st.markdown(message["content"])
def generate_response():
    """POST the current conversation to the Nanbeige chat API.

    Fix: the request now honours the `temperature` and `top_p` sidebar
    sliders instead of sending hard-coded values (0.7 / 1) that silently
    ignored the user's settings.

    Returns:
        requests.Response: opened with ``stream=True`` so the generated
        text can be consumed incrementally via ``stream_response``.
    """
    # Drop the canned assistant greeting (index 0) — it is UI-only and
    # should not be sent as model context. Slicing already copies, so no
    # separate .copy() is needed.
    messages = st.session_state.messages[1:]
    payload = json.dumps({
        'model': 'NBG-plus',
        'messages': messages,
        'max_tokens': 4096,
        'temperature': temperature,  # sidebar slider value
        'stream': True,
        'output_accumulate': True,   # each event carries the full text so far
        'top_p': top_p,              # sidebar slider value
    })
    headers = {
        'Authorization': f'Bearer {st.secrets["secret_token"]}',
        'Content-Type': 'application/json'
    }
    # requests.post is the idiomatic form of requests.request("POST", ...).
    return requests.post(st.secrets["secret_url"], headers=headers, data=payload, stream=True)
def stream_response(resp):
    """Yield incremental text deltas from a streaming (SSE-style) response.

    The API is called with ``output_accumulate``, so each event carries the
    full text generated so far; only the newly appended suffix is yielded.

    Fixes over the original: non-``data:`` lines (SSE comments, other
    fields) are skipped instead of being blindly sliced and fed to
    ``json.loads``, and a conventional ``data: [DONE]`` end-of-stream
    sentinel terminates cleanly instead of raising.

    Args:
        resp: a streaming response object exposing ``iter_lines()`` that
            yields ``bytes`` lines (e.g. ``requests.Response``).

    Yields:
        str: the newly generated portion of the assistant's message.
    """
    last_length = 0
    for raw in resp.iter_lines():
        line = raw.decode()
        if not line:
            continue  # keep-alive / blank separator lines
        if not line.startswith("data:"):
            continue  # ignore anything that is not an SSE data frame
        body = line[5:].strip()
        if body == "[DONE]":
            break  # end-of-stream sentinel
        r = json.loads(body)
        content = r['modelServerData']['choices'][0]['delta']['content']
        yield content[last_length:]
        last_length = len(content)
# Handle a new user turn: append and echo the user's message, then, if the
# last message is not from the assistant, stream a reply and record it.
if prompt := st.chat_input():
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
if st.session_state.messages[-1]["role"] != "assistant":
    with st.chat_message("assistant"):
        with st.spinner("Thinking..."):
            stream_resp = generate_response()
            # write_stream renders chunks as they arrive and returns the
            # concatenated full text when the stream ends.
            resp = st.write_stream(stream_response(stream_resp))
    message = {"role": "assistant", "content": resp}
    st.session_state.messages.append(message)