import torch
from torch import nn
from torchvision import models


class CustomResNet18(nn.Module):
    """ResNet-18 backbone adapted for small (32x32) inputs plus an MLP head.

    A stock torchvision ResNet-18 is loaded (optionally with ImageNet
    weights), its 7x7/stride-2 stem convolution is swapped for a freshly
    initialized 3x3/stride-1 one, and the 1000-way fully connected layer is
    replaced by a 512 -> 256 -> 10 classifier with BatchNorm and Dropout.

    Args:
        pretrained: when True, load ImageNet weights for the layers that are
            kept from the original model.
    """

    def __init__(self, pretrained=True):
        super().__init__()
        # Load torchvision's ResNet-18, optionally with ImageNet weights.
        backbone = models.resnet18(weights='IMAGENET1K_V1' if pretrained else None)

        # Replace the 7x7/stride-2 stem conv with a 3x3/stride-1 conv so the
        # input is not downsampled immediately.  NOTE(review): the stem
        # max-pool is still kept below, so spatial size is halved once before
        # layer1 — confirm this is intended for 32x32 inputs (the sibling
        # CustomResNet18_2 drops it).
        backbone.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)

        # Re-assemble the backbone stages in their original order.
        self.features = nn.Sequential(
            backbone.conv1,
            backbone.bn1,
            backbone.relu,
            backbone.maxpool,
            backbone.layer1,
            backbone.layer2,
            backbone.layer3,
            backbone.layer4,
            backbone.avgpool,
        )

        # Replacement classifier head: 512 -> 256 -> 10.
        self.classifier = nn.Sequential(
            nn.Linear(512, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(),
            nn.Dropout(0.3),
            nn.Linear(256, 10),
        )

        # Kaiming-initialize the two freshly added linear layers.
        nn.init.kaiming_normal_(self.classifier[0].weight)
        nn.init.kaiming_normal_(self.classifier[4].weight)

    def forward(self, x):
        """Run the backbone, flatten, and classify; returns (N, 10) logits."""
        feats = self.features(x)
        flat = torch.flatten(feats, 1)
        return self.classifier(flat)

# Selective unfreezing for CustomResNet18.
def freeze_layers(model, num_unfreeze=0):
    """Freeze the backbone except for its deepest ``num_unfreeze`` stages.

    The input stem (conv1 + bn1) and the classifier head are always left
    trainable, regardless of ``num_unfreeze``.
    """
    # Candidate stages, ordered shallow -> deep (indices match the
    # CustomResNet18.features Sequential, which includes the maxpool).
    stages = (
        model.features[0],  # replaced conv1 (3x3)
        model.features[1],  # bn1
        model.features[4],  # layer1
        model.features[5],  # layer2
        model.features[6],  # layer3
        model.features[7],  # layer4
    )

    # Everything before the cutoff is frozen; the rest stays trainable.
    cutoff = len(stages) - num_unfreeze
    for position, stage in enumerate(stages):
        trainable = position >= cutoff
        for param in stage.parameters():
            param.requires_grad = trainable

    # The input stem and the classifier head are always trainable.
    for module in (model.features[0], model.features[1], model.classifier):
        for param in module.parameters():
            param.requires_grad = True

class CustomResNet18_2(nn.Module):
    """ResNet-18 adapted for 32x32 inputs with a single linear classifier.

    Differences from the stock torchvision model:
      * the 7x7/stride-2 stem conv is replaced by a 3x3/stride-1 conv and
        the stem max-pool is dropped, so 32x32 inputs reach layer1 at full
        resolution;
      * the 1000-way fully connected head is replaced by a 10-way one.

    Args:
        pretrained: when True, load ImageNet weights for the layers that are
            kept from the original model.
    """

    def __init__(self, pretrained=True):
        super().__init__()
        # Load torchvision's ResNet-18, optionally with ImageNet weights.
        original_model = models.resnet18(weights='IMAGENET1K_V1' if pretrained else None)

        # Adapt the stem to 32x32 inputs.  Fix: bias=False — the conv is
        # immediately followed by bn1, which makes a conv bias redundant;
        # this also matches the stock ResNet stem and CustomResNet18 above.
        original_model.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)

        # Backbone layers, with the stem max-pool intentionally omitted.
        self.features = nn.Sequential(
            original_model.conv1,
            original_model.bn1,
            original_model.relu,
            original_model.layer1,
            original_model.layer2,
            original_model.layer3,
            original_model.layer4,
            original_model.avgpool
        )
        # Replacement classifier head (512 -> 10).
        self.classifier = nn.Sequential(
            nn.Linear(512, 10)
        )

    def forward(self, x):
        """Run the backbone, flatten, and classify; returns (N, 10) logits."""
        x = self.features(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x

# Selective unfreezing for CustomResNet18_2.
def freeze_layers_2(model, num_unfreeze=0):
    """Freeze the backbone except for its deepest ``num_unfreeze`` stages.

    The input stem (conv1 + bn1) and the classifier head are always left
    trainable, regardless of ``num_unfreeze``.
    """
    # Candidate stages, ordered shallow -> deep (indices match the
    # CustomResNet18_2.features Sequential, which has no maxpool).
    stages = (
        model.features[0],  # replaced conv1 (3x3)
        model.features[1],  # bn1
        model.features[3],  # layer1
        model.features[4],  # layer2
        model.features[5],  # layer3
        model.features[6],  # layer4
    )

    # Everything before the cutoff is frozen; the rest stays trainable.
    cutoff = len(stages) - num_unfreeze
    for position, stage in enumerate(stages):
        trainable = position >= cutoff
        for param in stage.parameters():
            param.requires_grad = trainable

    # The input stem and the classifier head are always trainable.
    for module in (model.features[0], model.features[1], model.classifier):
        for param in module.parameters():
            param.requires_grad = True


if __name__ == "__main__":
    res_net18 = CustomResNet18_2()
    # res_net18 = CustomResNet18()
    print(res_net18)
    print('-'*40)
