# NOTE(review): the three lines below were Hugging Face Spaces page-status
# residue ("Spaces: / Running / Running") captured during export, not source
# code; kept here as a comment so the module parses.
import streamlit as st
import os
from together import Together
# Explicit import instead of `from utils.helper import *` — `call_llama` is
# the only helper name this script uses; wildcard imports hide provenance.
from utils.helper import call_llama

st.set_page_config(layout="wide")
# NOTE(review): "π¦" looks like a mojibake'd emoji (likely a llama) — confirm
# the intended glyph against the original source before changing it.
st.title("Meta Llama3 π¦")

with st.sidebar:
    # Collapsible usage guide shown at the top of the sidebar.
    with st.expander("Instruction Manual"):
        st.markdown("""
        ## Meta Llama3 π¦ Chatbot
        This Streamlit app allows you to chat with Meta's Llama3 model.
        ### How to Use:
        1. **Input**: Type your prompt into the chat input box labeled "What is up?".
        2. **Response**: The app will display a response from Llama3.
        3. **Chat History**: Previous conversations will be shown on the app.
        ### Credits:
        - **Developer**: [Yiqiao Yin](https://www.y-yin.io/) | [App URL](https://huggingface.co/spaces/eagle0504/meta-llama) | [LinkedIn](https://www.linkedin.com/in/yiqiaoyin/) | [YouTube](https://youtube.com/YiqiaoYin/)
        Enjoy chatting with Meta's Llama3 model!
        """)

    # Example prompts users can copy into the chat box.
    st.success("Example: Explain what is supervised learning.")
    st.success("Example: What is large language model?")
    st.success("Example: How to conduct an AI experiment?")
    st.success("Example: Write a tensorflow flow code with a 3-layer neural network model.")

    # Wipe the stored conversation and refresh the page.
    if st.button("Clear Session"):
        st.session_state.messages = []
        # Fix: st.experimental_rerun() is deprecated and removed in current
        # Streamlit releases; st.rerun() is the supported replacement.
        st.rerun()
# Guarantee the conversation transcript exists before first use.
st.session_state.setdefault("messages", [])

# Streamlit re-executes the script on every interaction, so replay the
# stored transcript to keep earlier turns visible.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# Handle a new user turn: echo it, record it, query the model, show the reply.
user_prompt = st.chat_input("π Ask any question or feel free to use the examples provided in the left sidebar.")
if user_prompt:
    # Echo the user's message into the conversation view.
    with st.chat_message("user"):
        st.markdown(user_prompt)
    st.session_state.messages.append({"role": "user", "content": user_prompt})

    # Query the Llama3 backend (helper wraps the Together API call).
    answer = call_llama(user_prompt)

    # Render the assistant's reply and persist it for future reruns.
    with st.chat_message("assistant"):
        st.markdown(answer)
    st.session_state.messages.append({"role": "assistant", "content": answer})