import streamlit as st
import openai
import os
from openai import OpenAI
# ================ streamlit run your_script.py ====================#
#os.environ['OPENAI_API_KEY'] = "sk-d1799f55f0b840f79381f4116f428cbaxxx"

# Read the API key from the environment rather than hard-coding it in source.
api_key = os.environ.get('OPENAI_API_KEY')
# Security: never print the secret itself — log only whether it is configured.
print("OPENAI_API_KEY is set" if api_key else "OPENAI_API_KEY is NOT set")

# Configure the Streamlit page (must run before any other st.* call).
st.set_page_config(
    page_title="deepseek大模型的使用",
    page_icon=":smiley:",
    layout="wide",
)
# Page title
st.title("🤖 Deepseek-like Chatbot")

# 初始化聊天记录
# First run in this session: seed the history with an assistant greeting.
if "messages" not in st.session_state:
    greeting = {"role": "assistant", "content": "How can I help you?"}
    st.session_state.messages = [greeting]

# 显示历史聊天记录
# Replay the stored conversation so the page shows full context on each rerun.
for entry in st.session_state.messages:
    st.chat_message(entry["role"]).markdown(entry["content"])

# 用户输入
# Handle one conversation turn: record the user's prompt, stream the model's
# reply token-by-token, and persist both messages to the session history.
if prompt := st.chat_input("What is up?"):
    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Call the DeepSeek API (OpenAI-compatible) for the assistant's reply.
    with st.chat_message("assistant"):
        # Send the full history so the model has conversational context.
        messages = [
            {"role": m["role"], "content": m["content"]}
            for m in st.session_state.messages
        ]
        # Reuse one client per session instead of rebuilding it every turn.
        if "client" not in st.session_state:
            st.session_state.client = OpenAI(
                api_key=api_key, base_url="https://api.deepseek.com"
            )
        client = st.session_state.client
        try:
            # Stream the completion so partial text can be rendered live.
            response = client.chat.completions.create(
                model="deepseek-chat",
                messages=messages,
                stream=True,
            )
            full_response = ""
            message_placeholder = st.empty()

            for chunk in response:
                delta = chunk.choices[0].delta.content
                # Some chunks (e.g. role headers) carry no text — skip them.
                if delta:
                    full_response += delta
                    # Trailing block cursor signals the reply is still streaming.
                    message_placeholder.markdown(full_response + "▌")
            message_placeholder.markdown(full_response)
            # Persist the finished reply only after streaming succeeds, so a
            # failed call never leaves a partial assistant message in history.
            st.session_state.messages.append(
                {"role": "assistant", "content": full_response}
            )
        except Exception as e:
            # Log server-side and surface the failure in the UI instead of crashing.
            print(e)
            st.error(f"发生错误: {e}")


