import streamlit as st
from streamlit_chat import message
from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
from hugchat import hugchat
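# To try this locally (assuming the usual Streamlit workflow):
#   pip install streamlit streamlit-chat streamlit-extras hugchat
#   streamlit run app.py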
st.set_page_config(page_title="OpenAssistant-Chatbot-FTW-Open-Source")
# Sidebar: app title and a short description of the stack.
with st.sidebar:
    st.title('🤗💬 HugChat App')
    st.markdown('''
    ## 💡 LLM-powered chatbot built using:
    - [Streamlit](https://streamlit.io/)
    - [HugChat](https://github.com/Soulter/hugging-chat-api)
    - [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model
    ''')
    add_vertical_space(5)
# Initialise session-state chat history: 'generated' holds the bot's replies,
# 'past' holds the user's prompts.
if 'generated' not in st.session_state:
    st.session_state['generated'] = ["Hi. Please enter a prompt with context on what you need."]
if 'past' not in st.session_state:
    st.session_state['past'] = ['Hi.']
# Page layout: user input on top, a colored divider, then the chat history below.
input_container = st.container()
colored_header(label='', description='', color_name='blue-70')
response_container = st.container()
# Text box for the user's prompt.
def get_text():
    input_text = st.text_input("You: ", "", key="input")
    return input_text

with input_container:
    user_input = get_text()
# Send the prompt to HugChat and return the model's reply.
def generate_response(prompt):
    chatbot = hugchat.ChatBot()
    response = chatbot.chat(prompt)
    return response
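# NOTE (assumption): recent releases of the hugging-chat-api library require
# Hugging Face login cookies, e.g. hugchat.ChatBot(cookie_path='cookies.json');
# the no-argument constructor above only works with older releases.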
# Chat loop: append the new exchange to the history, then render every turn
# with streamlit_chat's message() component.
with response_container:
    if user_input:
        response = generate_response(user_input)
        st.session_state.past.append(user_input)
        st.session_state.generated.append(response)

    if st.session_state['generated']:
        for i in range(len(st.session_state['generated'])):
            message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
            message(st.session_state['generated'][i], key=str(i))