import torch
import torch.nn as nn
import torch.nn.functional as F
from torchsummary import summary
class AlexNet(nn.Module):
     """AlexNet-style CNN with BatchNorm after each convolution.

     Expects input of shape (N, 3, 227, 227); with the layer geometry below
     the final conv feature map is 256 x 6 x 6, which feeds the classifier
     head. The whole network (conv trunk + flatten + fully-connected head)
     lives in the single ``self.features`` Sequential, so ``forward`` is a
     single pass through it.

     Args:
         num_classes: size of the final logit layer (default 10, matching
             the original hard-coded value, so existing callers are
             unaffected).
     """

     def __init__(self, num_classes: int = 10):
         super().__init__()
         self.features = nn.Sequential(
             # Block 1: 3 -> 96, 227x227 -> 55x55, then pool -> 27x27.
             nn.Conv2d(in_channels=3, out_channels=96, kernel_size=11, stride=4, padding=1),
             nn.BatchNorm2d(96),
             nn.ReLU(),
             nn.MaxPool2d(kernel_size=3, stride=2),
             # Block 2: 96 -> 256, 27x27 preserved by padding, pool -> 13x13.
             nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, padding=2),
             nn.BatchNorm2d(256),
             nn.ReLU(),
             nn.MaxPool2d(kernel_size=3, stride=2),
             # Block 3: three 3x3 convs at 13x13, then pool -> 6x6.
             nn.Conv2d(in_channels=256, out_channels=384, kernel_size=3, padding=1),
             nn.BatchNorm2d(384),
             nn.ReLU(),
             nn.Conv2d(in_channels=384, out_channels=384, kernel_size=3, padding=1),
             nn.BatchNorm2d(384),
             nn.ReLU(),
             nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, padding=1),
             nn.BatchNorm2d(256),
             nn.ReLU(),
             nn.MaxPool2d(kernel_size=3, stride=2),
             # Classifier head: 256*6*6 flattened features -> logits.
             nn.Flatten(),
             nn.Linear(in_features=256 * 6 * 6, out_features=4096),
             nn.ReLU(),
             nn.Dropout(p=0.5),
             nn.Linear(in_features=4096, out_features=4096),
             nn.ReLU(),
             nn.Dropout(p=0.5),
             nn.Linear(in_features=4096, out_features=num_classes),
         )

     def forward(self, x):
         """Run the full conv trunk + classifier; returns (N, num_classes) logits."""
         return self.features(x)
if __name__ == '__main__':
    # Smoke-test: build the model on the best available device and print
    # a per-layer summary for a 3x227x227 input.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = AlexNet().to(device)
    # torchsummary.summary prints its table itself and returns None, so
    # wrapping it in print() would emit a spurious trailing "None" line.
    summary(model, (3, 227, 227))

