class ResNet(nn.Module):
    """ResNet-18-style network for small images (e.g. CIFAR-10).

    Stem (3x3 conv, stride 1) -> four residual stages of two blocks each,
    doubling channels 64->128->256->512 while halving spatial size ->
    global average pool -> linear classifier.

    Args:
        ResidualBlock: residual block class constructed as
            ``ResidualBlock(in_channels, out_channels, stride)``.
        num_classes: number of output classes (size of the logit vector).
    """

    def __init__(self, ResidualBlock, num_classes=10):
        super(ResNet, self).__init__()
        self.inchannel = 64  # channels feeding the next stage; updated by make_layer

        # Stem: 3x3/stride-1 conv keeps spatial size — suited to small inputs
        # (no 7x7/stride-2 + maxpool as in ImageNet ResNets).
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True)
        )

        # Four residual stages; only the first block of a stage downsamples.
        self.layer1 = self.make_layer(ResidualBlock, 64, 2, stride=1)   # keeps size
        self.layer2 = self.make_layer(ResidualBlock, 128, 2, stride=2)  # halves size
        self.layer3 = self.make_layer(ResidualBlock, 256, 2, stride=2)  # halves size
        self.layer4 = self.make_layer(ResidualBlock, 512, 2, stride=2)  # halves size

        # Classifier over the globally pooled 512-dim feature vector.
        self.fc = nn.Linear(512, num_classes)

    def make_layer(self, block, channels, num_blocks, stride):
        """Build one residual stage as an ``nn.Sequential``.

        Args:
            block: residual block class.
            channels: output channels of every block in this stage.
            num_blocks: number of blocks in the stage.
            stride: stride of the first block; subsequent blocks use 1.

        Returns:
            nn.Sequential of ``num_blocks`` residual blocks.
        """
        # First block may downsample; the rest preserve spatial size.
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for s in strides:  # do not shadow the `stride` parameter
            layers.append(block(self.inchannel, channels, s))
            self.inchannel = channels  # next block consumes this stage's output
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return raw class logits of shape (N, num_classes) for input (N, 3, H, W)."""
        out = self.conv1(x)      # stem: spatial size unchanged
        out = self.layer1(out)   # stage 1: size unchanged
        out = self.layer2(out)   # stage 2: size halved
        out = self.layer3(out)   # stage 3: size halved
        out = self.layer4(out)   # stage 4: size halved

        # Global average pooling. The adaptive form works for any input size;
        # the previous F.avg_pool2d(out, 4) assumed a 4x4 map (i.e. exactly
        # 32x32 inputs) and broke the fc dimension for anything else. For
        # 32x32 inputs the result is identical.
        out = F.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)  # flatten to (N, 512)
        out = self.fc(out)               # linear classifier -> logits
        return out
