import os
import asyncio
from dotenv import load_dotenv
from langchain.chat_models import init_chat_model
from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
import gradio as gr


async def chat_response(message, history):
    """Stream the assistant's reply for *message*, yielding the growing text.

    Reads the module-level ``chain`` (built in the ``__main__`` block) and
    streams its output chunks; each yield is the full partial reply so far.
    ``history`` is accepted for interface compatibility but is not used here.
    """
    accumulated = ""
    async for piece in chain.astream({"input": message}):
        accumulated = accumulated + piece
        yield accumulated


def create_chatbot():
    """Build and return the Gradio Blocks UI for the streaming chat demo.

    Wires a textbox + send/clear buttons to the streaming ``chat_response``
    generator; both Enter-in-textbox and the send button trigger a reply.
    """
    css = """
    """
    with gr.Blocks(title="DeepSeek Chat", css=css) as demo:
        gr.Markdown("## 小智聊天机器人")
        chatbot = gr.Chatbot()
        with gr.Row():
            msg = gr.Textbox(label="输入消息", placeholder="请输入消息")
            submit = gr.Button("发送")
            clear = gr.Button("Clear")

        async def respond(message, history):
            """Stream the bot reply into the chat; yields (textbox, chatbot) updates."""
            # Ignore blank/whitespace-only input: clear the textbox and keep
            # the chat history unchanged. (Bug fix: previously this yielded
            # the raw message string as the Chatbot value, which is not a
            # valid history list.)
            if not message.strip():
                yield "", history
                return
            # Append the user turn with a placeholder for the pending reply.
            history = history + [(message, None)]
            yield "", history

            # Stream the reply, rewriting the last turn as text accumulates.
            async for response in chat_response(message, history):
                history[-1] = (history[-1][0], response)
                yield "", history

        def clear_history():
            # Reset both the chat history and the input textbox.
            return [], ""

        msg.submit(respond, [msg, chatbot], [msg, chatbot])
        submit.click(respond, [msg, chatbot], [msg, chatbot])
        clear.click(clear_history, outputs=[chatbot, msg])
    return demo


if __name__ == '__main__':
    # Load .env first so the API key is in the environment before the
    # model provider initializes.
    load_dotenv(override=True)

    prompt = ChatPromptTemplate.from_messages(
        [("system", "你叫做小智，是一个乐于助人的助手，请根据用户的输入的问题进行回答"),
         ("human", "{input}")],
    )

    # Fail fast with a clear message instead of an opaque provider/auth
    # error on the first request. (Previously this variable was read but
    # never used.)
    DEEPSEEK_API_KEY = os.getenv("DEEPSEEK_API_KEY")
    if not DEEPSEEK_API_KEY:
        raise RuntimeError(
            "DEEPSEEK_API_KEY is not set; add it to your environment or .env file"
        )
    model = init_chat_model(model="deepseek-chat", model_provider="deepseek")

    parser = StrOutputParser()
    # NOTE: `chain` is a module-level global read by chat_response() at
    # request time — it must be built before the UI starts serving.
    chain = prompt | model | parser
    demo = create_chatbot()
    demo.launch(
        server_name="0.0.0.0",
        server_port=9191,
        share=True,
        inbrowser=True,
        debug=True,
    )
