import gradio as gr
import requests
import json

def generate_text(input_text, chat_history, temperature, top_p, top_k, max_tokens):
    """Send the conversation to a local OpenAI-compatible chat endpoint and return the reply.

    Parameters:
        input_text: the latest user message.
        chat_history: list of [user, assistant] pairs from earlier turns
            (assistant may be None for an unanswered turn).
        temperature / top_p / top_k / max_tokens: sampling parameters
            forwarded verbatim to the API.

    Returns:
        The assistant's reply text on success, or an "Error: ..." string
        on an HTTP error status or a connection/timeout failure.
    """
    url = "http://127.0.0.1:1025/v1/chat/completions"
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json"
    }

    # Build the message list, starting with the system prompt.
    messages = [
        {
            "role": "system",
            "content": """你是一个友好且知识渊博的AI助手。"""
        }
    ]

    # Replay earlier turns so the model sees the full conversation.
    for user_msg, assistant_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg is not None:
            messages.append({"role": "assistant", "content": assistant_msg})

    # Append the current user input as the final turn.
    messages.append({"role": "user", "content": input_text})

    payload = {
        "model": "qwen2",
        "messages": messages,
        # FIX: the original dict literal listed "max_tokens" twice (a
        # hard-coded 512 and the slider value); the duplicate is removed
        # so only the slider-controlled value remains.
        "max_tokens": max_tokens,
        "presence_penalty": 1.03,
        "frequency_penalty": 1.0,
        "temperature": temperature,
        "top_p": top_p,
        "top_k": top_k,
        "stream": False
    }

    try:
        # json= serializes the payload for us; the timeout keeps the UI
        # from hanging forever if the backend server is unreachable.
        response = requests.post(url, headers=headers, json=payload, timeout=120)
    except requests.exceptions.RequestException as exc:
        # Surface connection/timeout problems the same way HTTP errors
        # are surfaced: as an error string shown in the chat window.
        return f"Error: {exc}"

    if response.status_code == 200:
        result = response.json()
        return result['choices'][0]['message']['content']
    else:
        return f"Error: {response.status_code}"

def user(message, chat_history):
    """Append the submitted message as a new unanswered turn and clear the textbox.

    Returns a pair: the new textbox value ("") and the updated history,
    where the new entry's assistant slot is None until `bot` fills it.
    """
    updated_history = chat_history + [[message, None]]
    return "", updated_history

def bot(chat_history, temperature, top_p, top_k, max_tokens):
    """Fill in the assistant reply for the most recent (unanswered) turn.

    The last history entry is [user_message, None]; its user message plus
    all earlier turns are sent to the model, and the reply is written back
    into the entry's assistant slot before the history is returned.
    """
    latest_turn = chat_history[-1]
    earlier_turns = chat_history[:-1]
    latest_turn[1] = generate_text(
        latest_turn[0], earlier_turns, temperature, top_p, top_k, max_tokens
    )
    return chat_history

# Canned example prompts shown below the input box; clicking one copies
# it into the message textbox (wired up via gr.Examples further down).
examples = [
    "帮我写一首500字的诗",
    "你好",
    "睡不着怎么办？",
    "你是谁？"
]

# Build the Gradio UI: a chatbot pane, a message box with Submit/Stop/Clear
# buttons, and an accordion of sampling-parameter sliders.
with gr.Blocks(theme=gr.themes.Soft(), css=".disclaimer {font-variant-caps: all-small-caps;}") as demo:
    # Page title and the base URL note (NOTE(review): the f-prefixes here
    # are redundant — neither string contains a placeholder).
    gr.Markdown(f"""<h1><center>大模型测试</center></h1>""")
    gr.Markdown(f"Base URL: http://127.0.0.1:7860")

    # Conversation display area.
    chatbot = gr.Chatbot(height=500)
    with gr.Row():
        with gr.Column():
            # Free-text input; submitting it (Enter) triggers the same
            # user -> bot chain as the Submit button below.
            msg = gr.Textbox(
                label="Chat Message Box",
                placeholder="Chat Message Box",
                show_label=False,
                container=False,
            )
        with gr.Column():
            with gr.Row():
                submit = gr.Button("Submit")
                stop = gr.Button("Stop")
                clear = gr.Button("Clear")
    # Sampling parameters, collapsed by default. Each slider value is
    # passed straight through to generate_text on every bot() call.
    with gr.Row():
        with gr.Accordion("Advanced Options:", open=False):
            with gr.Row():
                with gr.Column():
                    with gr.Row():
                        temperature = gr.Slider(
                            label="Temperature",
                            value=0.1,
                            minimum=0.0,
                            maximum=1.0,
                            step=0.1,
                            interactive=True,
                            info="Higher values produce more diverse outputs",
                        )
                with gr.Column():
                    with gr.Row():
                        top_p = gr.Slider(
                            label="Top-p (nucleus sampling)",
                            value=1.0,
                            minimum=0.0,
                            maximum=1,
                            step=0.01,
                            interactive=True,
                            info=(
                                "Sample from the smallest possible set of tokens whose cumulative probability "
                                "exceeds top_p. Set to 1 to disable and sample from all tokens."
                            ),
                        )
                with gr.Column():
                    with gr.Row():
                        top_k = gr.Slider(
                            label="Top-k",
                            value=50,
                            minimum=0.0,
                            maximum=200,
                            step=1,
                            interactive=True,
                            info="Sample from a shortlist of top-k tokens — 0 to disable and sample from all tokens.",
                        )
                with gr.Column():
                    with gr.Row():
                        max_tokens = gr.Slider(
                            label="Max Tokens",
                            value=500,
                            minimum=10,
                            maximum=1000,
                            step=10,
                            interactive=True,
                            info="Maximum number of tokens to generate.",
                        )
    gr.Examples(examples, inputs=msg, label="Click on any example and press the 'Submit' button")

    # Pressing Enter in the textbox: first `user` clears the box and appends
    # the turn (unqueued, so it feels instant), then `bot` generates the
    # reply (queued, since it blocks on the HTTP request).
    submit_event = msg.submit(
        fn=user,
        inputs=[msg, chatbot],
        outputs=[msg, chatbot],
        queue=False,
    ).then(
        fn=bot,
        inputs=[
            chatbot,
            temperature,
            top_p,
            top_k,
            max_tokens,
        ],
        outputs=chatbot,
        queue=True,
    )
    # The Submit button mirrors the Enter-key chain above exactly.
    submit_click_event = submit.click(
        fn=user,
        inputs=[msg, chatbot],
        outputs=[msg, chatbot],
        queue=False,
    ).then(
        fn=bot,
        inputs=[
            chatbot,
            temperature,
            top_p,
            top_k,
            max_tokens,
        ],
        outputs=chatbot,
        queue=True,
    )
    # Stop cancels any in-flight generation from either trigger chain.
    stop.click(
        fn=None,
        inputs=None,
        outputs=None,
        cancels=[submit_event, submit_click_event],
        queue=False,
    )
    # Clear resets the chatbot component (None -> empty history).
    clear.click(lambda: None, None, chatbot, queue=False)

# share=True also publishes a temporary public gradio.live URL.
demo.launch(share=True)