import os
from openai import OpenAI
from typing import List, Dict
import gradio as gr
# Import models from the dwspark SDK
from dwspark.config import Config
from dwspark.models import MultiLang
# Logging (NOTE(review): no logger is actually configured in this file — confirm or remove)
# Load credentials from system environment variables: SPARKAI_UID, SPARKAI_APP_ID, SPARKAI_API_KEY, SPARKAI_API_SECRET
config = Config()
model = MultiLang(config, stream=True)


def chat(chat_query: str, chat_history: List):
    '''
    大模型对话
    :param chat_query: 当前用户问题
    :param chat_history: 历史对话
    :return:
    '''

    # 转换openai的历史
    messages:List = chat_history.copy()

    # 添加最新问题
    chat_history.append({'role': 'user', 'content': chat_query})
    messages.append({'role': 'user', 'content': chat_query})
    # 添加一个空的默认返回
    chat_history.append(gr.ChatMessage(role='assistant', content=''))
    # 调用星火大模型
    response = model.generate_stream(messages)
    # 获取结果并返回
    for chunk in response:
        # 添加放回结果
        chat_history[-1].content += chunk
        # 流式返回
        yield '', chat_history


# gradio页面绘制
with gr.Blocks() as demo:
    # 聊天对话框
    chatbot = gr.Chatbot([], elem_id="chat-box", label="聊天历史", type='messages')
    # 输入框
    chat_query = gr.Textbox(label="输入问题", placeholder="输入需要咨询的问题")
    # 按钮
    llm_submit_tab = gr.Button("发送", visible=True)
    # 问题样例
    gr.Examples(["100字の作文を書いてください。"], chat_query)
    # 按钮出发逻辑
    llm_submit_tab.click(fn=chat, inputs=[chat_query, chatbot], outputs=[chat_query, chatbot])

if __name__ == "__main__":
    demo.queue().launch()
