from typing import Annotated, Optional, TypedDict

from langchain_core.messages import SystemMessage
from langchain_core.runnables import RunnableConfig
from langchain_ollama import ChatOllama
from langgraph.constants import START, END
from langgraph.graph import add_messages, StateGraph

from owl_ai.domain.chat_entity import Agent


class State(TypedDict):
    """Shared LangGraph state flowing through the chat workflow nodes."""

    # Conversation history. The add_messages reducer appends/merges messages
    # returned by nodes instead of overwriting the whole list.
    messages: Annotated[
        list,
        add_messages
    ]
    # Agent entity whose ``config`` dict ("llm", "tools", "systemPrompt")
    # drives the chat node — see agent_chat_llm below.
    agent: Agent


def chat_llm_generate(llm_config: dict, tools: Optional[list] = None):
    """Build a chat LLM instance from a provider configuration dict.

    Args:
        llm_config: Provider configuration. Expected keys: "type"
            (provider id, currently only "ollama" is supported), "url"
            (base URL), "modelName", and an optional "params" dict of
            extra keyword arguments forwarded to the model constructor.
        tools: Optional list of tools bound to the agent. When tools are
            present streaming is disabled.

    Returns:
        A ``ChatOllama`` instance for "ollama" configs, otherwise ``None``.
    """
    llm_type = llm_config.get("type")
    if llm_type != 'ollama':
        # Unsupported provider: signal with None; callers must check.
        return None

    params = llm_config.get("params", {})
    return ChatOllama(
        base_url=llm_config.get("url"),
        model=llm_config.get("modelName"),
        verbose=True,
        # Stream only when no tools are bound — tool calls are expected
        # to arrive as a single, complete response.
        stream=tools is None,
        **params
    )


def agent_chat_llm(state: State, config: RunnableConfig):
    """LangGraph node: run one chat turn with the agent's configured LLM.

    Args:
        state: Graph state carrying the message history and the Agent,
            whose ``config`` dict provides "llm", "tools" and
            "systemPrompt".
        config: Runnable configuration injected by LangGraph (unused).

    Returns:
        A partial state update appending the AI response to "messages".

    Raises:
        ValueError: If the agent's LLM configuration is missing or names
            an unsupported provider.
    """
    agent_config = state['agent'].config
    llm_config = agent_config.get("llm")
    tools = agent_config.get("tools")

    chat_llm = chat_llm_generate(llm_config, tools)
    if chat_llm is None:
        # Fail fast with a clear error instead of the opaque
        # AttributeError that chat_llm.invoke(...) would raise on None.
        raise ValueError(f"Unsupported or missing LLM config: {llm_config}")

    messages = []
    system_prompt = agent_config.get("systemPrompt")
    if system_prompt:
        # Only prepend a system message when a prompt is configured;
        # SystemMessage with None content would be invalid.
        messages.append(SystemMessage(content=system_prompt))
    messages.extend(state["messages"])

    ai_message = chat_llm.invoke(messages)
    return {
        "messages": [
            ai_message
        ]
    }


def chatLLM(state: State, config):
    """LangGraph node: invoke a fixed, hard-coded Ollama chat model.

    NOTE(review): the endpoint and model are hard-coded here; prefer
    sourcing them from agent configuration as ``agent_chat_llm`` does.

    Args:
        state: Graph state providing the "messages" history.
        config: Runnable configuration injected by LangGraph (unused).

    Returns:
        A partial state update appending the AI response to "messages".
    """
    chat_llm = ChatOllama(
        base_url="http://182.148.114.142:11434",
        model="qwen2.5:14b-instruct",
        verbose=True,
        stream=True,
        # Bug fix: temperature must be numeric, not the string "0.1".
        temperature=0.1,
        num_predict=16384,
        num_ctx=16384,
    )
    messages = state["messages"]
    ai_message = chat_llm.invoke(messages)
    return {
        "messages": [
            ai_message
        ]
    }


class AgentFlowService:
    """Compiles runnable LangGraph workflows from Agent entities."""

    @classmethod
    def compile(cls, agent: Agent):
        """Dispatch compilation on the agent's type.

        Returns the compiled workflow, or None for unknown agent types.
        """
        # Table-driven dispatch; lambdas defer work until a type matches.
        builders = {
            'agent': lambda: cls.agent_compile(),
            'app': lambda: cls.app_compile(agent),
        }
        builder = builders.get(agent.agent_type)
        return builder() if builder is not None else None

    @classmethod
    def agent_compile(cls):
        """Agent compilation: a single chat node wired START -> chat_node -> END."""
        workflow = StateGraph(State)
        workflow.add_node("chat_node", agent_chat_llm)
        workflow.add_edge(START, "chat_node")
        workflow.add_edge("chat_node", END)
        return workflow.compile()

    @classmethod
    def app_compile(cls, agent: Agent):
        """Application compilation (not yet implemented)."""
        pass
