import gradio as gr
import random
import time
from http import HTTPStatus
from dashscope import Generation

def call_with_stream(msg):
    print(msg)
    messages = [
        {'role': 'user', 'content': msg}]
    responses = Generation.call(
        Generation.Models.qwen_turbo,
        api_key='*****',
        messages=messages,
        result_format='message',  # set the result to be "message" format.
        stream=True,
        incremental_output=True  # get streaming output incrementally
    )
    full_content = ''  # with incrementally we need to merge output.
    for response in responses:
        if response.status_code == HTTPStatus.OK:
            full_content += response.output.choices[0]['message']['content']
            print(response)
        else:
            print('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
                response.request_id, response.status_code,
                response.code, response.message
            ))
    return full_content
# 使用Blocks创建Gradio界面
with gr.Blocks() as demo:
    # 创建一个Chatbot组件，用于显示对话历史
    chatbot = gr.Chatbot(height=600)
    # 创建一个Textbox组件，让用户输入消息
    msg = gr.Textbox(lines=2, elem_id="message", placeholder="输入文本,限100字")
    # 创建一个Button组件，用于清除聊天历史
    sub = gr.Button("sub")
    # 创建一个Button组件，用于清除聊天历史
    clear = gr.Button("Clear")
   
    # 定义处理用户消息的函数
    def user(user_message, history):
   
    #     # 返回空字符串（不更新文本框），并将用户消息添加到聊天历史中
    #    if len(user_message)==0:
            
    #         return "",history
    #    if len(user_message)>100:
    #         return "",history
    #    bot_message = call_with_stream(user_message) #random.choice(["How are you?", "I love you", "I'm very hungry"])   
    #    history.append((user_message, bot_message))
    #   return "", history
       print(str(history))
       print("-----")
       return "", history + [[user_message, None]]
       
    #流式输出
    def bot(history):
        print(str(history[-1]))
        print(len(history[-1][0]))
        if len(history[-1][0])==0:
            history.pop()
                
            yield history
            return False
        messages = [{"role": "user", "content": history[-1][0]}]
        responses = Generation.call(
            Generation.Models.qwen_turbo,
            api_key='sk-Oyqwg6ikAO',
            messages=messages,
            result_format='message',  # set the result to be "message" format.
            stream=True,
            incremental_output=True  # get streaming output incrementally
        )
        #full_content = ''  # with incrementally we need to merge output.
        history[-1][1] = ""
        for response in responses:
            if response.status_code == HTTPStatus.OK:
                #full_content += response.output.choices[0]['message']['content']
                if response.output.choices[0]['message']['content']:
                    history[-1][1] += response.output.choices[0]['message']['content']
                    yield history
                #print(response)
            else:
                print('Request id: %s, Status code: %s, error code: %s, error message: %s' % (
                    response.request_id, response.status_code,
                    response.code, response.message
                ))
    # 将用户输入的消息提交给user函数，并且立即更新聊天历史
    # 然后调用bot函数，进一步更新聊天历史，添加机器人的回复
    # queue=False参数确保这些更新不会被放入队列，而是立即执行

    sub.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot,chatbot,chatbot)
    # 当点击清除按钮时，执行一个空操作（lambda: None），并且立即清除聊天历史
    clear.click(lambda: None, None, chatbot, queue=False)

# 启用事件队列系统
demo.queue()
# 启动Gradio应用程序
if __name__ == "__main__":
    demo.launch(auth=("kejiangtao", '654321'))