import torch.nn as nn


class LeNet5(nn.Module):
    """LeNet-inspired (but much deeper) CNN classifier with 45 output classes.

    Five conv+BN+ReLU stages with interleaved 2x2 max-pooling, followed by
    adaptive average pooling to a fixed 6x6 grid, so the classifier head
    works for any input large enough to survive the conv stack.

    Spatial trace for a 224x224 input (each 5x5 conv shrinks by 4, each
    2x2 pool halves with floor):
        224 -c1-> 220 -pool-> 110 -c2-> 106 -c3-> 102 -pool-> 51
            -c4-> 47 -c5-> 43 -pool-> 21 -adaptive-> 6
    """

    def __init__(self):
        super().__init__()
        # Feature extractor: (in_ch -> out_ch) per stage, all 5x5 kernels.
        self.c1 = nn.Conv2d(3, 32, 5)
        self.bn1 = nn.BatchNorm2d(32)
        self.c2 = nn.Conv2d(32, 64, 5)
        self.bn2 = nn.BatchNorm2d(64)
        self.c3 = nn.Conv2d(64, 120, 5)
        self.bn3 = nn.BatchNorm2d(120)
        self.c4 = nn.Conv2d(120, 240, 5)
        self.bn4 = nn.BatchNorm2d(240)
        self.c5 = nn.Conv2d(240, 480, 5)
        self.bn5 = nn.BatchNorm2d(480)

        # Two identically-configured poolers; kept as separate attributes so
        # the module tree (and any code addressing them by name) is unchanged.
        self.pool = nn.MaxPool2d(2, 2)
        self.pool2 = nn.MaxPool2d(2, 2)

        self.relu = nn.ReLU()
        # Fix the classifier input size regardless of image resolution.
        self.adaptive_pool = nn.AdaptiveAvgPool2d((6, 6))  # -> 480 x 6 x 6
        self.dropout = nn.Dropout(0.5)
        self.flatten = nn.Flatten()
        self.f1 = nn.Linear(480 * 6 * 6, 256)
        self.f2 = nn.Linear(256, 45)

    def forward(self, x):
        """Run the conv stages, then the dropout-regularized MLP head."""
        # Each stage is conv -> BN -> ReLU; a pooler of None means no
        # downsampling after that stage. Order matches the original exactly.
        stages = (
            (self.c1, self.bn1, self.pool),
            (self.c2, self.bn2, None),
            (self.c3, self.bn3, self.pool2),
            (self.c4, self.bn4, None),
            (self.c5, self.bn5, self.pool2),
        )
        for conv, bn, pooler in stages:
            x = self.relu(bn(conv(x)))
            if pooler is not None:
                x = pooler(x)
        x = self.adaptive_pool(x)
        x = self.flatten(x)
        x = self.dropout(self.relu(self.f1(x)))
        return self.f2(x)
