import os
import torch

class Net(torch.nn.Module):
    """Small CNN for 28x28 single-channel images (e.g. MNIST).

    Two conv+maxpool stages, each followed by a configurable normalization
    and activation, then a two-layer classifier head producing 10 logits.

    Args:
        activation: one of 'relu', 'selu', 'elu', 'gelu'
            (unknown names raise ValueError).
        normalization: 'bn' (BatchNorm2d), 'ln' (LayerNorm over C,H,W),
            'gn' (GroupNorm with 5 groups); anything else means no
            normalization (Identity).
    """

    def __init__(self, activation='selu', normalization='bn'):
        super().__init__()
        self.activation = activation
        self.normalization = normalization

        # Convolution + pooling stages.
        self.conv1 = torch.nn.Sequential(
            torch.nn.Conv2d(1, 10, kernel_size=5),
            torch.nn.MaxPool2d(kernel_size=2),
        )
        self.conv2 = torch.nn.Sequential(
            torch.nn.Conv2d(10, 20, kernel_size=5),
            torch.nn.MaxPool2d(kernel_size=2),
        )

        # BUGFIX: these normalization layers used to be re-created on every
        # forward() call, so their parameters were never registered with the
        # module, never trained, and BatchNorm running statistics were thrown
        # away each step. Build them once here instead. The spatial sizes
        # follow from the 28x28 input this architecture already assumes (the
        # classifier's 320 = 20 * 4 * 4 input hard-codes it):
        # 28 -conv5x5-> 24 -pool2-> 12 -conv5x5-> 8 -pool2-> 4.
        self.norm1 = self._get_normalization(10, 12, 12)
        self.norm2 = self._get_normalization(20, 4, 4)

        # The activation is stateless, so one shared instance suffices;
        # building it here also validates `activation` at construction time.
        self.act = self._get_activation()

        # Classifier head.
        self.fc = torch.nn.Sequential(
            torch.nn.Linear(320, 50),
            torch.nn.Linear(50, 10),
        )

    def _get_activation(self):
        """Return a fresh activation module for ``self.activation``.

        Raises:
            ValueError: if the activation name is not recognized.
        """
        activations = {
            'relu': torch.nn.ReLU,
            'selu': torch.nn.SELU,
            'elu': torch.nn.ELU,
            'gelu': torch.nn.GELU,
        }
        try:
            return activations[self.activation]()
        except KeyError:
            raise ValueError(f'Unknown activation function: {self.activation}') from None

    def _get_normalization(self, num_features, height, width):
        """Return a normalization module for a (num_features, height, width) map.

        Unrecognized ``self.normalization`` values fall back to Identity (no-op).
        """
        if self.normalization == 'bn':
            return torch.nn.BatchNorm2d(num_features)
        elif self.normalization == 'ln':
            # Normalizes over the full (C, H, W) feature map, so the layer
            # is tied to the spatial size it was built with.
            return torch.nn.LayerNorm([num_features, height, width])
        elif self.normalization == 'gn':
            # num_features (10 and 20 here) must be divisible by num_groups.
            return torch.nn.GroupNorm(num_groups=5, num_channels=num_features)
        else:
            return torch.nn.Identity()

    def forward(self, x):
        """Compute class logits for a batch of shape (N, 1, 28, 28).

        Returns:
            Tensor of shape (N, 10).
        """
        # Stage 1: conv + pool, then normalize and activate.
        x = self.act(self.norm1(self.conv1(x)))
        # Stage 2: conv + pool, then normalize and activate.
        x = self.act(self.norm2(self.conv2(x)))
        x = x.view(x.size(0), -1)  # flatten to (N, 320)
        return self.fc(x)

    def save(self, save_dir, name):
        """Write the model weights to ``<save_dir>/<name>/model.pt``."""
        log_dir = os.path.join(save_dir, name)
        # exist_ok avoids the check-then-create race of the previous version.
        os.makedirs(log_dir, exist_ok=True)
        torch.save({'model': self.state_dict()}, os.path.join(log_dir, 'model.pt'))

    def load(self, log_dir):
        """Restore weights from ``<log_dir>/model.pt`` (as written by ``save``).

        NOTE(review): torch.load unpickles arbitrary objects -- only load
        checkpoints from trusted sources.
        """
        # map_location='cpu' lets CPU-only machines load GPU-saved
        # checkpoints; load_state_dict then copies the values onto whatever
        # device this module's parameters already live on.
        checkpoint = torch.load(os.path.join(log_dir, 'model.pt'), map_location='cpu')
        self.load_state_dict(checkpoint['model'])
