# import openai
import time
import random
import streamlit as st
# from streamlit_chat import message
import os
from utils import set_main_page, message_border
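# `set_main_page` and `message_border` come from the local utils module (not shown here).
# A minimal sketch of what `message_border` is assumed to return -- an HTML snippet that
# wraps a message in a styled box -- kept commented out so the real import is used:
# def message_border(role: str, content: str) -> str:
#     color = "#6eb52f" if role == "user" else "#bfbfbf"  # hypothetical colors
#     return (
#         f'<p style="border: 1px solid {color}; padding: 5px; '
#         f'border-radius: 5px">{content}</p>'
#     )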
st.set_page_config(
    page_title="streamlit-demo",
    page_icon="🧠",
    layout="wide",
    initial_sidebar_state="expanded",
    menu_items={
        'Get Help': 'https://github.com/vickyli777',
        'Report a bug': "https://github.com/vickyli777",
        'About': "## a simple demo!"
    }
)
set_main_page()
st.sidebar.divider()
def clear_chat():
    """Reset the chat history stored in session state."""
    st.session_state.messages = []

st.sidebar.button("clear all chats", on_click=clear_chat, type="primary")
st.sidebar.caption("clear histories and start a new chat")
#storing the chat
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
# Display chat messages from history on app rerun
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        chat_box = message_border(msg["role"], msg["content"])
        st.markdown(chat_box, unsafe_allow_html=True)
# React to user input
if question := st.chat_input("your AI assistant is here, ask me something"):
    # Display user message in chat message container
    with st.chat_message(name="user"):
        cur_border = message_border("user", question)
        st.markdown(cur_border, unsafe_allow_html=True)
    # Add user message to chat history
    st.session_state.messages.append({"role": "user", "content": question})
# Display assistant response in chat message container
    # Display assistant response in chat message container
    with st.chat_message(name="assistant"):
        message_placeholder = st.empty()
        full_response = ""
        response = random.choice(
            [
                "Hello there! How can I assist you today?",
                "Hi, human! Is there anything I can help you with?",
                "Do you need help?",
            ]
        )
        # Simulate a streaming response by revealing one word at a time
        for chunk in response.split():
            full_response += chunk + " "
            time.sleep(0.5)
            # Add a blinking cursor to simulate typing
            ans_border = message_border("assistant", full_response + "▌")
            message_placeholder.markdown(ans_border, unsafe_allow_html=True)
        # Replace the cursor with the final, complete response
        ans_border = message_border("assistant", full_response)
        message_placeholder.markdown(ans_border, unsafe_allow_html=True)
    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})