from enum import Enum

from torch import nn
from torch.nn import functional as F


# Enum tagging the kind of module a GA individual represents, e.g. Attention,
# Linear, Conv; ResNet, BatchNorm and data-dim-transform variants are attached
# to this class later at module level (see the assignments near the bottom of
# this file).
class GA_Type(Enum):
    """Enumeration of neural-network module/operation types used to tag GA individuals."""

    Linear = 1  # fully connected layer
    MultiHeadAttention = 2  # multi-head attention module
    Conv = 3  # convolutional layer


class AF(Enum):
    """Enumeration of activation / normalization layers for GA individuals.

    Every value is an ``nn.Module`` subclass (a class, not an instance);
    callers must instantiate ``member.value`` before applying it to a tensor.
    """

    ReLU = nn.ReLU  # rectified linear unit
    # nn.Sigmoid / nn.Softmax replace the original F.sigmoid / F.softmax:
    # Enum treats plain-function values as methods, so ``Sig`` and ``SM``
    # silently failed to become enum members (and F.sigmoid / F.softmax
    # direct calls are the deprecated access path).
    Sig = nn.Sigmoid  # sigmoid activation
    Tanh = nn.Tanh  # hyperbolic tangent activation
    SM = nn.Softmax  # softmax, typically for multi-class outputs
    LReLU = nn.LeakyReLU  # leaky ReLU: non-zero gradient on negative inputs
    ELU = nn.ELU  # exponential linear unit, allows negative outputs
    Swish = nn.SiLU  # self-gated activation (SiLU / Swish)
    GELU = nn.GELU  # Gaussian error linear unit (Transformer-style models)
    LayerNorm = nn.LayerNorm  # layer normalization (constructor needs normalized_shape)
    BatchNorm = nn.BatchNorm2d  # 2-D batch normalization (constructor needs num_features)


class BaseGA9Ind(nn.Module):
    """Base class for GA individual modules.

    Subclasses implement :meth:`real_forward`; this base applies the
    configured activation and dropout on top of that result.

    :param GA_type: module category tag (see ``GA_Type``).
    :param af: activation applied after ``real_forward`` (see ``AF``).
    :param drop_out: dropout probability applied after the activation.
    :param args: extra positional arguments stored for subclasses.
    """

    def __init__(self, GA_type: GA_Type = GA_Type.Linear, af: AF = AF.ReLU, drop_out=0.5, *args):
        super(BaseGA9Ind, self).__init__()
        self.drop_out = drop_out
        self.GA_type = GA_type
        self.af = af
        self.args = args
        # Build the dropout module once; the original re-instantiated
        # nn.Dropout on every forward pass.
        self.dropout = nn.Dropout(drop_out)

    def forward(self, x, *args):
        result = self.real_forward(x, args)
        # ``self.af`` may be an AF enum member (whose .value is an nn.Module
        # class such as nn.ReLU) or a bare callable (historically F.sigmoid,
        # which Enum leaves as a plain function rather than a member).
        # The original called the enum member directly, which raises
        # TypeError ('AF' object is not callable); unwrap and instantiate.
        activation = self.af.value if isinstance(self.af, Enum) else self.af
        if isinstance(activation, type):
            # NOTE(review): assumes a zero-arg constructor; AF.LayerNorm /
            # AF.BatchNorm require constructor arguments and still fail
            # here -- confirm intended usage.
            activation = activation()
        result = activation(result)
        result = self.dropout(result)
        return result

    def real_forward(self, x, args):
        """Subclass hook computing the raw module output for ``x``.

        :raises NotImplementedError: always, unless overridden.
        """
        raise NotImplementedError("GA_Type {} is not implemented".format(self.GA_type))


class GA_ResNet(BaseGA9Ind):
    """Residual connection block.

    Projects the input with a linear layer and adds the output of another
    GA individual evaluated on the projected input: ``y = Wx + g(Wx)``.

    :param before_dim: input feature dimension of the linear projection.
    :param after_dim: output feature dimension of the linear projection.
    :param args: extra positional arguments forwarded to the base class.
    """

    def __init__(self, before_dim, after_dim, *args):
        # Pass GA_type positionally: the original
        # ``__init__(GA_type=GA_Type.ResNet, *args)`` raises
        # TypeError ("multiple values for argument 'GA_type'")
        # whenever *args is non-empty, because positional args bind
        # GA_type first.
        super(GA_ResNet, self).__init__(GA_Type.ResNet, *args)
        self.before_dim = before_dim
        self.after_dim = after_dim
        self.model = nn.Linear(before_dim, after_dim)

    def forward(self, x, other_GA_ind: BaseGA9Ind):
        # Fully overrides the base forward: no activation/dropout applied here.
        x = self.model(x)
        ind = other_GA_ind(x)
        return x + ind


GA_Type.ResNet = GA_ResNet  # 残差网络模块 (Residual Network Block)


class GA_DataDimTrans(BaseGA9Ind):
    """Resize the trailing feature dimension of a tensor to ``after_dim``.

    Grows the last dimension by zero-padding on the right, or shrinks it by
    truncation; all other dimensions are left untouched.

    :param after_dim: target size of the last dimension.
    :param args: extra positional arguments forwarded to the base class.
    """

    def __init__(self, after_dim, *args):
        # Pass GA_type positionally: ``__init__(GA_type=..., *args)`` raises
        # TypeError ("multiple values for argument 'GA_type'") whenever
        # *args is non-empty.
        super(GA_DataDimTrans, self).__init__(GA_Type.data_dim_trans, *args)
        self.after_dim = after_dim

    def forward(self, x):
        # Only the size of the trailing (feature) dimension matters; reading
        # x.shape[-1] directly also supports 1-D inputs, which the original
        # ``batch_size, *other_dims, input_dim = x.shape`` unpacking rejected.
        input_dim = x.shape[-1]

        if self.after_dim > input_dim:
            # Grow: zero-pad the last dimension on the right.
            return F.pad(x, (0, self.after_dim - input_dim))
        # Shrink (or keep, when after_dim == input_dim): truncate.
        return x[..., :self.after_dim]


GA_Type.data_dim_trans = GA_DataDimTrans  # 数据维度转换操作 (Data Dimension Transformation)
