import torch
import torch.nn as nn

class MLP(nn.Module):
    """A simple multi-layer perceptron: stacked Linear + activation (+ Dropout) stages."""

    def __init__(self, input_dim, output_dim, hidden_dims=None, activation='relu', dropout=0.0):
        """
        Build the MLP.

        :param input_dim: size of each input sample
        :param output_dim: size of each output sample
        :param hidden_dims: list of hidden-layer widths, e.g. [256, 128] for two
            hidden layers of width 256 and 128; None/empty means a single
            Linear mapping input -> output with no activation
        :param activation: activation name ('relu', 'gelu', 'tanh', ...)
        :param dropout: dropout probability; 0.0 (default) disables Dropout layers
        """
        super().__init__()
        if not hidden_dims:
            # Degenerate case: plain linear projection.
            modules = [nn.Linear(input_dim, output_dim)]
        else:
            modules = []
            in_features = input_dim
            for width in hidden_dims:
                modules.append(nn.Linear(in_features, width))
                modules.append(get_activation(activation))
                if dropout > 0.0:
                    modules.append(nn.Dropout(dropout))
                in_features = width
            # Final projection to the output dimension (no activation/dropout after it).
            modules.append(nn.Linear(in_features, output_dim))

        self.network = nn.Sequential(*modules)

    def forward(self, x):
        """
        Run the network.

        :param x: input tensor of shape (N, input_dim)
        :return: output tensor of shape (N, output_dim)
        """
        return self.network(x)

def get_activation(activation):
    """
    Map an activation-name string to a freshly constructed activation module.

    :param activation: one of 'relu', 'gelu', 'tanh', 'leaky_relu', 'sigmoid'
    :return: a new ``nn.Module`` instance for the requested activation
    :raises ValueError: if the name is not one of the supported activations
    """
    # Dict dispatch instead of an if/elif chain: easier to extend and scan.
    # Classes (not instances) are stored so every call returns a fresh module,
    # matching the original behavior of instantiating per call.
    factories = {
        'relu': nn.ReLU,
        'gelu': nn.GELU,
        'tanh': nn.Tanh,
        'leaky_relu': nn.LeakyReLU,
        'sigmoid': nn.Sigmoid,
    }
    try:
        return factories[activation]()
    except KeyError:
        raise ValueError(f"Unsupported activation function: {activation}") from None

# Demo: build an MLP and sanity-check the output shape on a random batch.
if __name__ == "__main__":
    batch_size = 32
    in_features, out_features = 100, 10

    net = MLP(
        in_features,
        out_features,
        [256, 128],  # two hidden layers of width 256 and 128
        'relu',
        0.3,
    )

    sample = torch.randn(batch_size, in_features)  # random batch of 32 inputs
    result = net(sample)
    print(result.shape)  # expected: (32, 10)