# merlinGPT-4o chat — a minimal Streamlit chat UI backed by g4f.
"""Minimal Streamlit chat UI backed by the g4f ``gpt-4o-mini`` provider.

Streamlit reruns this script top-to-bottom on every interaction, so the
conversation is kept in ``st.session_state`` and replayed each run.
"""
import streamlit as st
from g4f.client import Client

# One client per script run; g4f selects a free backend provider itself.
client = Client()

st.set_page_config(
    page_title="merlinGPT-4o Chat",
    page_icon="",
    layout="centered",
)

# Initialize the persistent conversation store on the first run only.
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []

st.title("merlinGPT-4o-mini")

# Replay the stored conversation so it survives Streamlit's reruns.
for message in st.session_state.chat_history:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

user_prompt = st.chat_input("Ask merlinGPT-4o-mini")
if user_prompt:
    # Echo the user's message immediately and record it.
    st.chat_message("user").markdown(user_prompt)
    st.session_state.chat_history.append(
        {"role": "user", "content": user_prompt}
    )

    # Send the full history (plus a system prompt) so the model has context.
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": "You are a helpful assistant"},
            *st.session_state.chat_history,
        ],
    )
    final_response = response.choices[0].message.content

    st.session_state.chat_history.append(
        {"role": "assistant", "content": final_response}
    )
    # Render under the "assistant" role so the avatar matches the replayed
    # history above (the original used the custom name "merlinGPT", which
    # disagreed with the "assistant" role stored in chat_history).
    with st.chat_message("assistant"):
        st.markdown(final_response)

# Other model names usable with this g4f client:
# gpt-4o-mini
# llama-3-70b-instruct
# mixtral-8x7b