from typing import Callable


class BaseGraphNode(Callable):
    """Base class for graph nodes: LLM construction and input-assembly helpers."""

    @classmethod
    def chat_llm_generate(cls, llm_config: dict):
        """
        Build a chat LLM instance from a configuration dict.

        Args:
            llm_config: configuration with keys "llmType", "baseUrl",
                "model" and, depending on the type, "temperature"
                (ollama, default 0.1) or "openaiKey" (openai).

        Returns:
            A ChatOllama or ChatOpenAI instance, or None when "llmType"
            is not recognized.
        """
        llm_type = llm_config.get("llmType")
        base_url = llm_config.get("baseUrl")
        model = llm_config.get("model")

        if llm_type == "ollama":
            # Imported lazily so the dependency is only needed when this
            # backend is actually selected.
            from langchain_ollama import ChatOllama

            temperature = llm_config.get("temperature", 0.1)
            return ChatOllama(
                base_url=base_url,
                model=model,
                temperature=temperature,
            )

        if llm_type == "openai":
            from langchain_openai import ChatOpenAI

            openai_key = llm_config.get("openaiKey")
            # BUG FIX: ChatOpenAI has no "openai_key" parameter; the
            # credential must be passed as "api_key" (alias of
            # "openai_api_key") — verify against the pinned
            # langchain_openai version.
            return ChatOpenAI(
                base_url=base_url,
                model=model,
                api_key=openai_key,
            )

        return None

    @classmethod
    def input_params_assem(cls, state, input_config: dict):
        """
        Assemble a node's input parameters from literals and references.

        Args:
            state: graph state; state["node_params"] maps a node name to
                that node's output dict.
            input_config: iterable of param specs, each a dict with
                "type", "key" and "value". When type is "ref", value has
                the form "<node>.<key>"; otherwise value is used as-is.

        Returns:
            dict of resolved parameters. The first occurrence of a key
            wins (setdefault semantics); unresolvable refs are skipped.
        """
        # Default to {} so a state without "node_params" does not crash
        # on attribute access below.
        node_params = state.get("node_params") or {}

        input_params = {}
        for param in input_config:
            param_type = param.get("type")
            key = param.get("key")
            value = param.get("value")

            if param_type != 'ref':
                input_params.setdefault(key, value)
                continue

            parts = value.split(".")
            # Guard against malformed refs without a "." separator,
            # which previously raised IndexError.
            if len(parts) < 2:
                continue
            ref_node, ref_key = parts[0], parts[1]

            ref_node_params = node_params.get(ref_node)
            if ref_node_params is None:
                continue
            ref_node_value = ref_node_params.get(ref_key)
            # BUG FIX: compare against None so falsy-but-valid values
            # (0, "", False) are no longer silently dropped.
            if ref_node_value is not None:
                input_params.setdefault(key, ref_node_value)
        return input_params
