from typing import TYPE_CHECKING

import torch
from torch import nn
from torch.nn import functional as F

from d2l import torch as d2l

if TYPE_CHECKING:
    # `_typeshed` exists only for static type checkers; importing it at
    # runtime raises ModuleNotFoundError, so it must stay behind this guard.
    from _typeshed import Self

# 残差快
class Residual(nn.Module):
    """The residual block of ResNet.

    Main path: 3x3 conv -> BN -> ReLU -> 3x3 conv -> BN. The input is then
    added back (identity skip connection) and a final ReLU is applied.

    Args:
        input_channels: number of channels of the input tensor.
        num_channels: number of output channels of both convolutions.
        use_1x1conv: if True, transform the skip path with a strided 1x1
            convolution so its channels/resolution match the main path.
        strides: stride of the first convolution (2 halves height/width).
    """

    def __init__(self, input_channels, num_channels, use_1x1conv=False, strides=1):
        super().__init__()
        # NOTE: the Conv2d keyword is `stride`, not `strides`.
        self.cov1 = nn.Conv2d(input_channels, num_channels,
                              kernel_size=3, padding=1, stride=strides)
        # padding=1 keeps the spatial size so `Y += X` below is shape-valid.
        self.cov2 = nn.Conv2d(num_channels, num_channels,
                              kernel_size=3, padding=1)
        if use_1x1conv:
            # Project the skip path to the main path's channels/resolution.
            self.cov3 = nn.Conv2d(input_channels, num_channels,
                                  kernel_size=1, stride=strides)
        else:
            self.cov3 = None
        self.bn1 = nn.BatchNorm2d(num_channels)
        self.bn2 = nn.BatchNorm2d(num_channels)

    def forward(self, X):
        """Return ReLU(F(X) + X), where F is the conv-BN main path."""
        Y = F.relu(self.bn1(self.cov1(X)))
        Y = self.bn2(self.cov2(Y))
        if self.cov3:
            X = self.cov3(X)
        Y += X
        return F.relu(Y)


# Stage 1 (the ResNet stem): 7x7 conv with stride 2 and padding 3, then
# batch norm, ReLU, and a stride-2 3x3 max-pool — together quartering the
# input resolution (e.g. 224 -> 56). NOTE: the original used the
# nonexistent `nn.Relu` (AttributeError) and `padding=1` with no stride,
# which breaks the canonical downsampling schedule.
b1 = nn.Sequential(
    nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3),
    nn.BatchNorm2d(64),
    nn.ReLU(),
    nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
)

def resnet_block(input_channels, num_channels, num_residuals, first_block=False):
    """Build one ResNet stage as a list of `num_residuals` Residual blocks.

    Unless this is the first stage after the stem (`first_block=True`), the
    leading block halves the height/width (stride 2) and switches the
    channel count, using a 1x1 convolution on the skip path; every other
    block keeps channels and resolution unchanged.
    """
    return [
        Residual(input_channels, num_channels, use_1x1conv=True, strides=2)
        if i == 0 and not first_block
        else Residual(num_channels, num_channels)
        for i in range(num_residuals)
    ]
# Stages 2-5: each stacks two residual blocks; stages 3-5 halve the
# resolution and double the channel count in their first block.
b2 = nn.Sequential(*resnet_block(64, 64, 2, first_block=True))
b3 = nn.Sequential(*resnet_block(64, 128, 2))
b4 = nn.Sequential(*resnet_block(128, 256, 2))
b5 = nn.Sequential(*resnet_block(256, 512, 2))

# Full ResNet-18: stem + 4 stages, then global average pooling, flatten,
# and a 10-way linear classifier. NOTE: the original placed Flatten and
# Linear inside the AdaptiveAvgPool2d call (misplaced parenthesis), which
# raises a TypeError — they belong to the outer Sequential.
net = nn.Sequential(b1, b2, b3, b4, b5,
                    nn.AdaptiveAvgPool2d((1, 1)),
                    nn.Flatten(),
                    nn.Linear(512, 10))
