import torch
from torch import nn


class Flatten(nn.Module):
    """Collapse every dimension except the batch dimension into one.

    Equivalent to ``nn.Flatten(start_dim=1)``; kept as a minimal custom module.
    """

    def forward(self, x):
        # Keep dim 0 (batch) and merge the remaining dims into a single axis.
        batch_size = x.size(0)
        return x.view(batch_size, -1)

# Input: 1 x 128 x 128 (channels x height x width)
class LeNet5(nn.Module):
    """LeNet-5-style convolutional classifier for 1-channel 128x128 inputs.

    Architecture: three conv+ReLU+maxpool stages, then a flatten and a
    two-layer classifier head with dropout. Output is a (batch, 10) logit
    tensor (no softmax applied; pair with e.g. ``nn.CrossEntropyLoss``).
    """

    def __init__(self):
        super().__init__()

        self.network = nn.Sequential(
            # Input: 1 x 128 x 128
            nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=0),  # -> 16 x 124 x 124
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # -> 16 x 62 x 62

            nn.Conv2d(in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=0),  # -> 32 x 58 x 58
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # -> 32 x 29 x 29

            nn.Conv2d(in_channels=32, out_channels=64, kernel_size=3, stride=1, padding=0),  # -> 64 x 27 x 27
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2),  # floor(27/2)=13 -> 64 x 13 x 13

            # Flatten all dims after the batch dim:
            # (batch, 64, 13, 13) -> (batch, 64 * 13 * 13 = 10816)
            nn.Flatten(start_dim=1, end_dim=-1),

            nn.Linear(in_features=13 * 13 * 64, out_features=256),  # 10816 -> 256
            nn.Dropout(0.5),  # regularization between the two FC layers
            nn.Linear(in_features=256, out_features=10)  # 256 -> 10 class logits
        )

    def forward(self, input_data):
        """Run a (batch, 1, 128, 128) tensor through the network.

        Returns a (batch, 10) tensor of class logits.
        """
        output = self.network(input_data)
        return output


if __name__ == '__main__':
    # Smoke test: a single random 1-channel 128x128 image through the model.
    net = LeNet5()
    sample = torch.randn(1, 1, 128, 128)
    print(net(sample).shape)
