# main.py — Chatlytic: a minimal Streamlit chat front-end for a hosted LLM API.
import streamlit as st
import requests
# Page header shown at the top of the app.
st.title("Chatlytic")

# First-run defaults: the model identifier and an empty chat transcript.
st.session_state.setdefault("openai_model", "mixtral-8x7b")
st.session_state.setdefault("messages", [])
def clear_chat() -> None:
    """Reset the stored conversation to an empty transcript."""
    st.session_state["messages"] = []
# A "Clear Chat" button wipes the transcript for the current session.
if st.button('Clear Chat'):
    clear_chat()

# Replay the conversation so far, one chat bubble per stored message.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# Handle a newly submitted user message: echo it, query the remote LLM API,
# then append and display the assistant's reply.
if prompt := st.chat_input("What is up?"):
    # Record and render the user's message immediately.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # OpenAI-compatible chat-completions endpoint hosted on HF Spaces.
    api_endpoint = "https://ka1kuk-llm-api.hf.space/api/v1/chat/completions"
    # Request payload; full history is sent so the model has context.
    data = {
        "model": st.session_state["openai_model"],
        "messages": st.session_state.messages,
        "temperature": 0.5,
        "top_p": 0.95,
        "max_tokens": -1,  # API-specific sentinel; presumably "no limit" — TODO confirm against the API docs
        "use_cache": False,
        "stream": False,
    }

    try:
        # timeout= prevents the Streamlit script run from hanging forever
        # on a dead or slow endpoint (original had no timeout).
        response = requests.post(api_endpoint, json=data, timeout=60)
        # Raise on any non-2xx status, not just != 200.
        response.raise_for_status()
        response_data = response.json()
        # Pull the reply out once instead of repeating the nested lookup.
        assistant_reply = response_data["choices"][0]["message"]["content"]
    except (requests.RequestException, ValueError, KeyError, IndexError):
        # Network failure, HTTP error, malformed JSON, or unexpected
        # payload shape — show the error instead of crashing the app.
        st.error("Failed to get a response from the custom API.")
    else:
        # Store and render the assistant's reply.
        st.session_state.messages.append({"role": "assistant", "content": assistant_reply})
        with st.chat_message("assistant"):
            st.markdown(assistant_reply)