import torch
from torch import nn
from d2l import torch as d2l

class BatchNorm(nn.Module):
    """Batch normalization implemented from scratch.

    Keeps learnable per-feature scale (``gamma``) and shift (``beta``)
    parameters, plus running estimates of the mean and variance that are
    used for normalization at inference time.

    Args:
        num_features: number of outputs of a fully connected layer, or
            number of channels of a 2D convolutional layer.
        num_dims: 2 for a fully connected layer, 4 for a 2D conv layer.
    """

    def __init__(self, num_features, num_dims):
        # BUG FIX: was `super.__init__()` — `super` must be called to
        # produce the proxy object before `__init__` can be invoked.
        super().__init__()
        # Parameter shape broadcastable against the input:
        # (1, F) for dense inputs, (1, C, 1, 1) for conv feature maps.
        if num_dims == 2:
            shape = (1, num_features)
        else:
            shape = (1, num_features, 1, 1)
        # Learnable affine parameters, initialized to the identity map.
        self.gamma = nn.Parameter(torch.ones(shape))
        self.beta = nn.Parameter(torch.zeros(shape))
        # Running statistics: plain tensors (not Parameters), so they are
        # updated manually and never touched by the optimizer.
        self.moving_mean = torch.zeros(shape)
        self.moving_variance = torch.ones(shape)

    def forward(self, X):
        # The running stats are not registered buffers, so `.to(device)`
        # on the module does not move them — migrate them lazily to
        # wherever the input lives.
        if self.moving_mean.device != X.device:
            self.moving_mean = self.moving_mean.to(X.device)
            self.moving_variance = self.moving_variance.to(X.device)
        Y, self.moving_mean, self.moving_variance = self.batch_norm(
            X, self.gamma, self.beta, self.moving_mean,
            self.moving_variance, 1e-5, 0.9)
        return Y

    # BUG FIX: this was a plain method missing `self`, yet called as
    # `self.batch_norm(X, ...)` — the implicit `self` made the call pass
    # 8 arguments to a 7-parameter function (TypeError). It uses no
    # instance state, so a @staticmethod keeps the call site working.
    @staticmethod
    def batch_norm(X, gamma, beta, moving_mean, moving_variance,
                   epsilon, momentum):
        """Normalize ``X``; return ``(Y, new_moving_mean, new_moving_var)``.

        Uses grad mode as a proxy for train vs. eval: with autograd
        disabled (inference) the running statistics are used; otherwise
        batch statistics are used and the running estimates are updated
        with exponential-moving-average factor ``momentum``.
        """
        if not torch.is_grad_enabled():
            # Inference: normalize with the accumulated running stats.
            X_hat = (X - moving_mean) / torch.sqrt(moving_variance + epsilon)
        else:
            assert len(X.shape) in (2, 4)
            # Fully connected input: (batch_size, features)
            # 2D conv input:        (batch_size, channels, height, width)
            if len(X.shape) == 2:
                mean = X.mean(dim=0)
                variance = ((X - mean) ** 2).mean(dim=0)
            else:
                # Reduce over batch and spatial dims; keepdim so the
                # per-channel stats broadcast back over the input.
                mean = X.mean(dim=(0, 2, 3), keepdim=True)
                variance = ((X - mean) ** 2).mean(dim=(0, 2, 3), keepdim=True)
            X_hat = (X - mean) / torch.sqrt(variance + epsilon)
            # Exponential moving average of the batch statistics.
            moving_mean = momentum * moving_mean + (1 - momentum) * mean
            moving_variance = momentum * moving_variance + (1 - momentum) * variance
        # Learnable scale and shift.
        Y = gamma * X_hat + beta
        # `.data` detaches the running stats from the autograd graph.
        return Y, moving_mean.data, moving_variance.data
    
def _build_lenet_bn():
    """Build a LeNet-style network with a custom BatchNorm after every
    conv/linear layer except the final classifier layer."""
    return nn.Sequential(
        # Conv stage 1: 1 -> 6 channels, 5x5 kernel, then 2x2 max pool.
        nn.Conv2d(1, 6, kernel_size=5),
        BatchNorm(6, num_dims=4),
        nn.Sigmoid(),
        nn.MaxPool2d(kernel_size=2, stride=2),
        # Conv stage 2: 6 -> 16 channels, 5x5 kernel, then 2x2 max pool.
        nn.Conv2d(6, 16, kernel_size=5),
        BatchNorm(16, num_dims=4),
        nn.Sigmoid(),
        nn.MaxPool2d(kernel_size=2, stride=2),
        # Classifier head: flatten 16x4x4 feature maps, two hidden
        # dense layers, 10-way output.
        nn.Flatten(),
        nn.Linear(16 * 4 * 4, 120),
        BatchNorm(120, num_dims=2),
        nn.Sigmoid(),
        nn.Linear(120, 84),
        BatchNorm(84, num_dims=2),
        nn.Sigmoid(),
        nn.Linear(84, 10),
    )


net = _build_lenet_bn()

    



