import numpy as np
from core.engine.DictTensor import DictTensor
from constants import LAYER_MAP


class LayerMeta(type):
    """Metaclass that installs a layer-dispatch table on every class it creates.

    Immediately after the class body is evaluated, the freshly created class's
    own ``_build_layer_dispatcher`` classmethod is invoked once and its result
    is stored as the class attribute ``_LAYER_DISPATCHER``.
    """

    def __new__(mcls, name, bases, namespace):
        klass = super().__new__(mcls, name, bases, namespace)
        # Build the dispatcher exactly once, at class-creation time.
        klass._LAYER_DISPATCHER = klass._build_layer_dispatcher()
        return klass


class DynamicLayerManager(metaclass=LayerMeta):
    """Lazily build and cache dynamic (3, 6) mask layers from a hexagram dict.

    LayerMeta invokes ``_build_layer_dispatcher`` once at class-creation time,
    producing a table that maps each ``(start, end)`` index range from
    LAYER_MAP onto the ``_get_<key>_layer`` method that renders layers inside
    that range.
    """

    # Index window of the dynamic layers this manager serves.
    # NOTE(review): presumably offsets into a global layer space shared with
    # the fixed layers (see build_shared_fixed) — confirm against LAYER_MAP.
    _start = 9
    _end = 91

    @classmethod
    def _build_layer_dispatcher(cls):
        """Return the ``(start, end) -> layer-builder`` dispatch table.

        Runs once per class (triggered by LayerMeta). Raises AttributeError
        when a LAYER_MAP key has no matching ``_get_<key>_layer`` method, so
        a missing builder fails loudly at import time instead of at lookup.
        """
        print(f"正在为 {cls.__name__} 构建 _LAYER_DISPATCHER...")  # debug trace; TODO(review): switch to logging
        dispatcher = {}
        for (start, end), (key, _) in LAYER_MAP.items():
            method_name = f'_get_{key}_layer'
            if not hasattr(cls, method_name):
                raise AttributeError(f"未找到方法: {method_name}")
            # getattr on the class yields a plain function, so callers must
            # pass `self` explicitly (see _generate_layer).
            dispatcher[(start, end)] = getattr(cls, method_name)
        return dispatcher

    def __init__(self, gua_dict):
        # All layer builders read from this wrapped tensor.
        self.tensor = DictTensor(gua_dict)
        # Cache: layer index -> (3, 6) ndarray, filled lazily by get_dynamic.
        self._dynamic = {}

    def update_dynamic_plane(self, index: int, data: np.ndarray) -> None:
        """Overwrite the cached plane at *index*; *data* must be shaped (3, 6)."""
        if data.shape != (3, 6):
            raise ValueError("数据形状必须为(3,6)")
        self._dynamic[index] = data

    def get_dynamic(self, i: int) -> np.ndarray:
        """Return dynamic layer *i*, generating and caching it on first use.

        Raises ValueError when *i* falls outside ``[0, _end - _start)``.
        """
        # BUGFIX: was a bare `assert 0 <= i < ...`, which is stripped under
        # `python -O`; validate explicitly so bad indices always raise.
        if not 0 <= i < self._end - self._start:
            raise ValueError(f"索引超出范围: {i}")
        if i not in self._dynamic:
            self._dynamic[i] = self._generate_layer(i)
        return self._dynamic[i]

    def _generate_layer(self, i: int) -> np.ndarray:
        """Dispatch *i* to the builder whose ``(start, end)`` range holds it."""
        for (start, end), func in self._LAYER_DISPATCHER.items():
            if start <= i < end:
                # Builders receive the offset local to their range.
                return func(self, i - start)
        raise ValueError(f"索引超出范围: {i}")

    # --- per-range layer builders (names must match LAYER_MAP keys) --------
    # Each receives an offset local to its (start, end) range and returns a
    # (3, 6) mask. NOTE(review): most builders return uint8, but the zhi_*
    # comparisons yield bool arrays — confirm callers tolerate both dtypes.

    def _get_yinyang_layer(self, index):
        # Tensor encoding: yang = 1, yin = 2 — hence index + 1.
        return (self.tensor.yinyang == index + 1).astype(np.uint8)

    def _get_block_layer(self, index):
        # Line state: still=0, moving=1, changed=2, hidden=3.
        return (self.tensor.block == index).astype(np.uint8)

    def _get_wx_layer(self, index):
        # Five elements (wu xing).
        return (self.tensor.wx == index).astype(np.uint8)

    def _get_zhi_layer(self, index):
        # Twelve earthly branches (di zhi).
        return (self.tensor.zhi == index).astype(np.uint8)

    def _get_vary_layer(self, index):
        # Single-plane range: `index` is intentionally unused (kept for the
        # uniform dispatcher signature).
        return (self.tensor.vary == 1).astype(np.uint8)

    def _get_wx_alter_layer(self, index):
        # "Returning" (回头) element relation of the changed line.
        return (self.tensor.wx_alter() == index).astype(np.uint8)

    def _get_wx_vary_layer(self, index):
        # Moving-line generate/overcome relation, selected by `index`.
        return self.tensor.wx_vary(index)

    def _get_zhi_alter_layer(self, index):
        # Offsets 0-11 select 'zhiDiff' planes, 12-23 select 'zhiSum' planes.
        mode = 'zhiDiff' if index < 12 else 'zhiSum'
        idx = index if mode == 'zhiDiff' else index - 12
        return self.tensor.zhi_alter(mode) == idx

    def _get_zhi_vary_layer(self, index):
        # Same 12/12 split as _get_zhi_alter_layer.
        mode = 'zhiDiff' if index < 12 else 'zhiSum'
        idx = index if mode == 'zhiDiff' else index - 12
        return self.tensor.zhi_vary(mode,compare=idx)

    # --- deprecated: kept as reference for the flat index layout -----------

    def __generate_layer(self, i) -> np.ndarray:
        """Deprecated monolithic dispatcher (superseded by _generate_layer).

        Flat index layout it implemented:
          0-1:   yin / yang
          2-5:   still / moving / changed / hidden
          6-10:  five elements (metal, water, wood, fire, earth)
          11-22: twelve earthly branches (hai ... xu)
          23:    selected moving line and its changed line
        """
        if i < 2:
            mask = (self.tensor.yinyang == i + 1).astype(np.uint8)  # yang=1, yin=2
        elif i < 6:
            mask = (self.tensor.block == i - 2).astype(np.uint8)  # still=0, moving=1, changed=2, hidden=3
        elif i < 11:
            mask = (self.tensor.wx == i - 6).astype(np.uint8)  # metal=0, water=1, ...
        elif i < 23:
            mask = (self.tensor.zhi == i - 11).astype(np.uint8)  # hai=0, zi=1, ...
        elif i == 23:
            mask = (self.tensor.vary == 1).astype(np.uint8)
        elif i < 29:
            # "Returning" (回头) relations, per moving line (& with its mask):
            #   回头扶 (assist):   33, 24
            #   回头生 (generate): 34, 25
            #   回头克 (overcome): 35, 26
            #   回头耗 (drain):    36, 27
            #   回头泄 (leak):     37, 28
            mask = (self.tensor.wx_alter() == i - 24).astype(np.uint8)
        elif i < 34:
            # Moving line generates / overcomes — which number must be specified?
            mask = self.tensor.wx_vary(i - 29)
        elif i < 58:
            # zhi_alter: changed-vs-moving clash/combine/harm, progressing /
            # regressing changes.
            index = i - 34
            mask = self.tensor.zhi_alter('zhiDiff' if index < 12 else 'zhiSum') == index % 12
        elif i < 82:
            index = i - 58
            mask = self.tensor.zhi_vary('zhiDiff' if index < 12 else 'zhiSum') == index % 12
        else:
            raise ValueError("索引超出范围")

        return mask


def build_shared_fixed():
    """Build the 9 predefined fixed-layer masks as a (9, 3, 6) uint8 array.

    Layout along the first axis:
      0-5: per-line one-hot masks — plane i sets column i in every row
           (line 6 = 0, line 5 = 1, ...).
      6:   columns 0-2, all rows (外卦 per the original note — presumably the
           outer trigram; confirm against callers).
      7:   columns 3-5, all rows (内卦 / inner trigram).
      8:   row 0, all columns (主卦 / main hexagram).

    Returns:
        np.ndarray: the (9, 3, 6) stack of 0/1 masks.
    """
    # Idiom fix: the original filled these planes with triple nested loops
    # and opaque `i // 7 * 3` index arithmetic; direct slice assignment is
    # equivalent and self-describing.
    sf = np.zeros((9, 3, 6), dtype=np.uint8)
    for line in range(6):
        sf[line, :, line] = 1   # one-hot column per line
    sf[6, :, 0:3] = 1           # first trigram block
    sf[7, :, 3:6] = 1           # second trigram block
    sf[8, 0, :] = 1             # main-hexagram row
    return sf
