import os
from openai import OpenAI
from typing import List, Dict
import gradio as gr

# Spark (Xfyun) OpenAI-compatible API endpoint.
url = 'https://spark-api-open.xf-yun.com/v2'
# APIPassword; for local development this may be replaced with a static string.
api_token = os.getenv('SPARKAI_PASSWORD')
# Sample user question (kept for manual testing; not referenced below).
question = '请介绍一下datawhale这个开源社区'
# Create the OpenAI-compatible client, reusing the endpoint constant above
# instead of duplicating the URL string literal.
client = OpenAI(api_key=api_token, base_url=url)


def chat(chat_query: str, chat_history: List):
    """Stream a chat completion from the Spark model into the gradio history.

    :param chat_query: the user's current question
    :param chat_history: prior conversation in gradio "messages" format
    :yield: ('', chat_history) pairs after every streamed chunk, so gradio
            clears the input box and refreshes the chatbot incrementally
    """
    # Snapshot the history for the API request before mutating chat_history.
    messages: List = chat_history.copy()

    # Record the new user turn, both for the API call and for the UI.
    chat_history.append({'role': 'user', 'content': chat_query})
    messages.append({'role': 'user', 'content': chat_query})
    # Placeholder assistant message that streamed text is appended into.
    chat_history.append(gr.ChatMessage(role='assistant', content=''))
    is_first_content = True  # True until the first final-answer chunk arrives
    # Call the Spark model with streaming enabled.
    response = client.chat.completions.create(model="x1", messages=messages, stream=True)
    for chunk in response:
        # Some providers emit keep-alive/usage chunks with an empty choices
        # list; skip them instead of raising IndexError.
        if not chunk.choices:
            continue
        # Hoist the delta once per chunk instead of re-walking the
        # chunk.choices[0].delta attribute chain four times.
        delta = chunk.choices[0].delta
        # Chain-of-thought ("reasoning") tokens stream into the current message.
        reasoning_content = getattr(delta, 'reasoning_content', None)
        if reasoning_content is not None:
            chat_history[-1].content += reasoning_content
        # Final-answer tokens.
        content = getattr(delta, 'content', None)
        if content is not None:
            if is_first_content:
                is_first_content = False
                # Re-label everything streamed so far as the collapsible
                # "thinking" panel...
                chat_history[-1].metadata = {'title': '🛠️ 模型思考内容'}
                # ...and open a fresh assistant message for the real answer.
                chat_history.append(gr.ChatMessage(role='assistant', content=''))
            chat_history[-1].content += content
        # Yield after each chunk so the UI updates as tokens arrive.
        yield '', chat_history


# gradio页面绘制
# Build the gradio page.
with gr.Blocks() as demo:
    # Conversation pane using openai-style "messages" history entries.
    chatbot = gr.Chatbot([], elem_id="chat-box", label="聊天历史", type='messages')
    # Question input box.
    chat_query = gr.Textbox(label="输入问题", placeholder="输入需要咨询的问题")
    # Submit button.
    llm_submit_tab = gr.Button("发送", visible=True)
    # Clickable example questions (fixes the "以下" -> "一下" typo).
    gr.Examples(["请介绍一下datawhale这个开源社区", "针对大学生生活，有什么好的AI应用推荐介绍"], chat_query)
    # Wire the button to the streaming handler; the two outputs clear the
    # textbox and refresh the chatbot after every yielded chunk.
    llm_submit_tab.click(fn=chat, inputs=[chat_query, chatbot], outputs=[chat_query, chatbot])

if __name__ == "__main__":
    # queue() is required for generator (streaming) event handlers in gradio.
    demo.queue().launch()
