import uuid
from dataclasses import dataclass
from operator import itemgetter
from typing import Dict, Any

from injector import inject
from uuid import UUID

from langchain.memory import ConversationBufferWindowMemory
from langchain_community.chat_message_histories import FileChatMessageHistory
from langchain_core.memory import BaseMemory
from langchain_core.runnables import RunnablePassthrough, RunnableLambda, RunnableConfig
from langchain_core.tracers import Run

from internal.schema.app_schema import CompletionReq
from internal.service import AppService
from pkg.response import success_json, validate_error_json, success_message
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from internal.core.tools.builtin_tools.providers import BuiltinProviderManager

@inject
@dataclass
class AppHandler:
    """Application controller: HTTP handlers for App CRUD, chat completion and debugging."""
    # Injected service layer handling App persistence.
    app_service: AppService
    # Factory exposing builtin tool providers (used in ad-hoc debugging, see ping()).
    provider_factory: BuiltinProviderManager

    def create_app(self):
        """Create a new App record via the service layer and report its id."""
        app = self.app_service.create_app()
        return success_message(f"应用已经成功创建，id为{app.id}")

    def get_app(self, id: uuid.UUID):
        """Fetch an App by id and report its name."""
        app = self.app_service.get_app(id)
        return success_message(f"应用已经成功获取，名字是{app.name}")

    def update_app(self, id: uuid.UUID):
        """Update an App by id and report the updated name."""
        app = self.app_service.update_app(id)
        return success_message(f"应用已经成功修改，修改的名字是:{app.name}")

    def delete_app(self, id: uuid.UUID):
        """Delete an App by id and report the deleted id."""
        app = self.app_service.delete_app(id)
        return success_message(f"应用已经成功删除，id为:{app.id}")

    @classmethod
    def _load_memory_variable(cls, inputs: Dict[str, Any], config: RunnableConfig) -> Dict[str, Any]:
        """Load memory variables for the LCEL chain.

        Reads an optional ``BaseMemory`` instance from
        ``config["configurable"]["memory"]`` and returns its memory variables
        for *inputs*; falls back to an empty ``history`` when no usable memory
        is configured.

        ``inputs`` is renamed from ``input`` to avoid shadowing the builtin;
        ``RunnableLambda`` invokes this positionally, so callers are unaffected.
        """
        configurable_memory = config.get("configurable", {}).get("memory")
        # isinstance() already rejects None, so no separate None check is needed.
        if isinstance(configurable_memory, BaseMemory):
            return configurable_memory.load_memory_variables(inputs)
        return {"history": []}

    @classmethod
    def _on_save_end(cls, run_obj: Run, config: RunnableConfig) -> None:
        """Chain ``on_end`` listener: persist the run's inputs/outputs to memory.

        Only acts when ``config["configurable"]["memory"]`` holds a
        ``BaseMemory``; otherwise the run ends without saving context.
        """
        configurable_memory = config.get("configurable", {}).get("memory")
        if isinstance(configurable_memory, BaseMemory):
            configurable_memory.save_context(run_obj.inputs, run_obj.outputs)

    def completion(self):
        """Chat endpoint (LCEL): run the LLM chain with file-backed windowed memory.

        Validates the POSTed request, builds a prompt with a history
        placeholder, wires a ``ConversationBufferWindowMemory`` through the
        runnable config, and returns the model's answer as JSON.
        """
        # 1. Validate the POSTed request payload.
        req = CompletionReq()
        if not req.validate():
            return validate_error_json(req.errors)

        # 2. Prompt with a placeholder that the memory's history fills in.
        prompt = ChatPromptTemplate.from_messages([
            ("system", "你是一个强大的聊天机器人，能根据用户提问回复对应的问题"),
            MessagesPlaceholder("history"),
            ("human", "{query}"),
        ])

        # 3. Windowed memory (last k=3 exchanges) persisted to a local file.
        memory = ConversationBufferWindowMemory(
            k=3,
            input_key="query",
            output_key="output",
            return_messages=True,
            chat_memory=FileChatMessageHistory("./storage/memory/chat_memory.txt"),
        )

        # `model=` is the current ChatOpenAI keyword (`model_name` is its
        # deprecated alias) — kept consistent across this module.
        llm = ChatOpenAI(model="kimi-k2-0711-preview")

        # 4. Build the chain: inject history from memory, then
        #    prompt -> llm -> parser; the on_end listener writes the finished
        #    exchange back into memory.
        chain = (RunnablePassthrough.assign(
            history=RunnableLambda(self._load_memory_variable) | itemgetter("history")
        ) | prompt | llm | StrOutputParser()).with_listeners(on_end=self._on_save_end)

        # 5. Invoke the chain, passing the memory through the runnable config
        #    so both the loader and the on_end listener can reach it.
        chain_input = {"query": req.query.data}
        content = chain.invoke(chain_input, config={"configurable": {"memory": memory}})
        return success_json({"content": content})

    def debug(self, app_id: UUID):
        """Debug chat endpoint: single-turn, memoryless prompt -> llm -> parser chain.

        NOTE(review): ``app_id`` is currently unused — kept for route
        compatibility; presumably it will select per-app configuration later.
        """
        # 1. Validate the POSTed request payload.
        req = CompletionReq()
        if not req.validate():
            return validate_error_json(req.errors)

        # 2. Build the components.
        prompt = ChatPromptTemplate.from_template("{query}")
        llm = ChatOpenAI(model="kimi-k2-0711-preview")
        parser = StrOutputParser()

        # 3. Compose the chain.
        chain = prompt | llm | parser

        # 4. Invoke it and return the answer as JSON.
        content = chain.invoke({"query": req.query.data})
        return success_json({"content": content})

    def ping(self):
        """Health-check endpoint."""
        return success_message("ok")
