|
import streamlit as st |
|
import requests |
|
|
|
st.title("Chatlytic")

# Initialise per-session state on the first script run only; Streamlit
# re-executes this file top-to-bottom on every interaction, so each key is
# guarded against being reset on reruns.
for _key, _default in (("openai_model", "mixtral-8x7b"), ("messages", [])):
    if _key not in st.session_state:
        st.session_state[_key] = _default
|
|
|
|
|
def clear_chat() -> None:
    """Reset the stored conversation transcript to an empty history."""
    st.session_state.messages = []
|
|
|
|
|
# Manual reset control for the conversation.
if st.button('Clear Chat'):
    clear_chat()

# Replay the stored transcript so the chat history survives Streamlit reruns.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
|
|
|
|
|
if prompt := st.chat_input("What is up?"):
    # Record and immediately render the user's new message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    api_endpoint = "https://ka1kuk-llm-api.hf.space/api/v1/chat/completions"

    # OpenAI-compatible chat-completion payload; the full history is sent so
    # the backend has conversational context.
    data = {
        "model": st.session_state["openai_model"],
        "messages": st.session_state.messages,
        "temperature": 0.5,
        "top_p": 0.95,
        "max_tokens": -1,  # presumably "-1" means no explicit limit — confirm with backend
        "use_cache": False,
        "stream": False,
    }

    try:
        # A timeout keeps the Streamlit script from hanging forever when the
        # remote API is slow or unreachable (the original call had none).
        response = requests.post(api_endpoint, json=data, timeout=60)
    except requests.RequestException as exc:
        # Network-level failure (DNS, connection, timeout): show an error
        # instead of crashing the app with a traceback.
        st.error(f"Request to the custom API failed: {exc}")
    else:
        if response.status_code == 200:
            try:
                response_data = response.json()
                reply = response_data["choices"][0]["message"]["content"]
            except (ValueError, KeyError, IndexError, TypeError):
                # Body was not JSON, or not in the expected completion shape.
                st.error("The custom API returned an unexpected response format.")
            else:
                st.session_state.messages.append(
                    {"role": "assistant", "content": reply}
                )
                with st.chat_message("assistant"):
                    st.markdown(reply)
        else:
            st.error("Failed to get a response from the custom API.")