import chainlit as cl
from customer_service_system import CustomerServiceSystem
from utils.enum_class import LLMType, EmbeddingType

# Chainlit reference documentation: https://docs.chainlit.io/


@cl.on_chat_start
async def start():
    """Set up a new chat session: build the service backend, create the
    query engines, stash them in the user session, and greet the user."""
    # --- Backend option 1: local HuggingFace LLM (disabled) ---
    # mysystem = CustomerServiceSystem(
    #     LLMType.Local_HuggingFace,
    #     EmbeddingType.Local_HuggingFace,
    #     llm_model_name="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
    #     device_map="cuda",
    # )

    # --- Backend option 2: OpenRouter-hosted LLM (disabled) ---
    # mysystem = CustomerServiceSystem(
    #     LLMType.Online_OpenRouter,
    #     EmbeddingType.Local_HuggingFace,
    # )

    # --- Backend option 3 (active): SiliconFlow OpenAI-compatible endpoint ---
    service = CustomerServiceSystem(
        LLMType.Online_OpenAILike,
        EmbeddingType.Local_HuggingFace,
        llm_model_name="deepseek-ai/DeepSeek-V3",
        api_base="https://api.siliconflow.cn/v1",
        is_function_calling_model=True,
    )

    # Primary engine: router query engine with streaming enabled.
    router_engine = service.create_query_engine("./storage", streaming=True)
    cl.user_session.set("query_engine", router_engine)

    # Fallback engine: plain vector query engine, also streaming.
    fallback_engine = service.create_vector_query_engine(
        "./storage", streaming=True
    )
    cl.user_session.set("backup_query_engine", fallback_engine)

    welcome = cl.Message(
        author="Assistant", content="欢迎使用AI电商客服系统，请说出你的疑问。"
    )
    await welcome.send()


async def _stream_and_log(res, msg: cl.Message) -> None:
    """Stream a query response's tokens into *msg* and log its source nodes.

    Shared by the primary and fallback query paths in ``main``.
    """
    # res is assumed to be a LlamaIndex StreamingResponse-like object
    # exposing `response_gen` and `source_nodes` — TODO confirm against
    # CustomerServiceSystem's engine implementation.
    for chunk in res.response_gen:
        await msg.stream_token(chunk)

    # Console-only provenance logging for debugging.
    for node in res.source_nodes:
        print(f"\n来源文档：{node.metadata.get('file_name')}")
        print(f"文档最后修改时间：{node.metadata.get('last_modified_date')}")
        # Some retrievers return score=None; formatting None with :.2f
        # would raise TypeError and falsely trigger the fallback path.
        if node.score is not None:
            print(f"置信度：{node.score:.2f}")


@cl.on_message
async def main(message: cl.Message):
    """Answer one user message.

    Tries the router query engine first; on any failure falls back to the
    plain vector engine; if that also fails, reports the error to the user.
    Tokens are streamed into a single Assistant message as they arrive.
    """
    query_engine = cl.user_session.get("query_engine")
    backup_query_engine = cl.user_session.get("backup_query_engine")

    msg = cl.Message(content="", author="Assistant")

    try:
        # query() is synchronous/blocking; run it in a worker thread via
        # cl.make_async so it does not stall the Chainlit event loop.
        res = await cl.make_async(query_engine.query)(message.content)
        await _stream_and_log(res, msg)

    except Exception as primary_err:
        # Broad catch is deliberate here: this is the top-level handler
        # boundary and any engine failure should trigger the fallback.
        system_msg = cl.Message(
            content=f"系统遇到问题：{type(primary_err).__name__}，正在尝试备用方案...",
            author="System",
        )
        await system_msg.send()

        try:
            # Fallback: plain vector query engine, same streaming flow.
            res = await cl.make_async(backup_query_engine.query)(message.content)
            await _stream_and_log(res, msg)
        except Exception as backup_err:
            # Both engines failed — surface the error to the user.
            await msg.stream_token(
                f"很抱歉，系统无法处理您的请求。错误信息：{type(backup_err).__name__}: {str(backup_err)}"
            )

    await msg.send()


# How to run:
# With Chainlit: chainlit run customer_service_system_ui.py -w
