from typing import TypedDict, Annotated, Callable

from langchain_core.messages import SystemMessage
from langchain_core.runnables import RunnableConfig
from langgraph.constants import START, END
from langgraph.graph import add_messages, StateGraph

from owl_ai.domain.agent_config_entity import AgentConfigEntity
from owl_ai.service.graph.graph_compile import GraphCompile


def chat_llm_generate(config: dict):
    """
    Build a chat LLM instance from an agent configuration dict.

    Args:
        config: LLM settings. Reads "type" ("ollama" or "openai"),
            "baseUrl", "modelName", and the optional "stream" flag.

    Returns:
        A configured ChatOllama / ChatOpenAI instance, or None when
        "type" is missing or unsupported.
    """
    llm_type = config.get("type")

    base_url = config.get("baseUrl")
    model_name = config.get("modelName")
    # Bug fix: "stream" was previously read but never used — streaming
    # was unconditionally enabled. Wire it through; when the key is
    # absent, keep the original behavior (streaming enabled).
    stream = config.get("stream")
    disable_streaming = (not stream) if stream is not None else False

    if llm_type == "ollama":
        # Imported lazily so the provider package is only required when used.
        from langchain_ollama import ChatOllama
        return ChatOllama(
            base_url=base_url,
            model=model_name,
            disable_streaming=disable_streaming,
        )
    if llm_type == "openai":
        from langchain_openai import ChatOpenAI
        return ChatOpenAI(
            base_url=base_url,
            model_name=model_name,
            disable_streaming=disable_streaming,
        )
    # Unknown provider type: preserve the original contract of returning None.
    return None


class ChatLLMState(TypedDict):
    """Graph state for the basic chat-LLM graph."""
    # Conversation history; the add_messages reducer appends/merges new
    # messages into the existing list instead of replacing it.
    messages: Annotated[list, add_messages]
    # Agent configuration carried through the graph run.
    agent_config: AgentConfigEntity


class ChatLLMNode:
    """
    Callable graph node that invokes the configured chat LLM.

    Prepends the configured system prompt (when present) to the
    conversation history and returns the model's reply as a partial
    state update.

    Note: the explicit ``Callable`` base class was removed — subclassing
    ``typing.Callable`` is unidiomatic, and any object defining
    ``__call__`` already satisfies ``isinstance(obj, Callable)`` via the
    ABC subclass hook.
    """

    def __init__(self, agent_config: AgentConfigEntity):
        self.agent_config = agent_config
        # Build the provider-specific chat model from the agent config.
        self.chat_llm = chat_llm_generate(agent_config.config)
        # May be None when no "systemPrompt" key is configured.
        self.system_prompt = agent_config.config.get("systemPrompt")

    def __call__(self, state: ChatLLMState, config: RunnableConfig):
        """
        Run one LLM turn.

        Args:
            state: Current graph state; "messages" holds the history.
            config: Runnable config supplied by LangGraph at invocation.

        Returns:
            Partial state update: {"messages": [ai_message]}.
        """
        llm_messages = []
        # Bug fix: the system prompt must come FIRST in the message
        # list; the original appended it after the history, and many
        # providers ignore or reject trailing system messages. Also
        # skip it entirely when not configured, instead of sending
        # SystemMessage(content=None).
        if self.system_prompt:
            llm_messages.append(SystemMessage(content=self.system_prompt))
        llm_messages.extend(state.get("messages") or [])
        ai_message = self.chat_llm.invoke(llm_messages)

        return {
            "messages": [
                ai_message
            ]
        }


class ChatLLMGraphCompile(GraphCompile):
    """
    Compiles the basic chat-LLM graph: a single LLM node wired directly
    between START and END, with no tool nodes attached.
    """

    @classmethod
    def compile(cls, config: AgentConfigEntity):
        """Build and compile the minimal chat graph for the given agent config."""
        node_name = "chat_llm"

        builder = StateGraph(ChatLLMState)
        builder.add_node(node_name, ChatLLMNode(config))
        builder.add_edge(START, node_name)
        builder.add_edge(node_name, END)

        return builder.compile()
