import torch


class Darknet19(torch.nn.Module):
    """Darknet-19 style convolutional backbone (as used by YOLOv2) ending in
    an ``n``-channel 1x1 projection.

    Input:  ``(B, 3, H, W)`` with ``H`` and ``W`` divisible by 32
            (the network contains five 2x2 stride-2 max-pools).
    Output: ``(B, n, H/32, W/32)``.

    Args:
        n: number of output channels of the final 1x1 convolution
           (e.g. number of classes, or anchors * (5 + classes) for detection).
    """

    @staticmethod
    def _conv_block(in_channels, out_channels, kernel_size):
        """Return [Conv2d, BatchNorm2d, LeakyReLU] — the basic Darknet unit.

        3x3 convolutions are padded by 1 so spatial size is preserved;
        1x1 convolutions need no padding.
        """
        padding = 1 if kernel_size == 3 else 0
        return [
            torch.nn.Conv2d(in_channels, out_channels,
                            kernel_size=kernel_size, padding=padding),
            torch.nn.BatchNorm2d(out_channels),
            # Darknet uses leaky ReLU with negative slope 0.1; PyTorch's
            # default is 0.01, so it must be passed explicitly.
            torch.nn.LeakyReLU(0.1),
        ]

    def __init__(self, n):
        super().__init__()

        # Stage 1: stride-32 feature extractor, 3 -> 512 channels.
        self.sequential1 = torch.nn.Sequential(
            *self._conv_block(3, 32, 3),
            torch.nn.MaxPool2d(kernel_size=2, stride=2),
            *self._conv_block(32, 64, 3),
            torch.nn.MaxPool2d(kernel_size=2, stride=2),
            *self._conv_block(64, 128, 3),
            *self._conv_block(128, 64, 1),
            *self._conv_block(64, 128, 3),
            torch.nn.MaxPool2d(kernel_size=2, stride=2),
            *self._conv_block(128, 256, 3),
            *self._conv_block(256, 128, 1),
            *self._conv_block(128, 256, 3),
            torch.nn.MaxPool2d(kernel_size=2, stride=2),
            *self._conv_block(256, 512, 3),
            *self._conv_block(512, 256, 1),
            *self._conv_block(256, 512, 3),
            *self._conv_block(512, 256, 1),
            *self._conv_block(256, 512, 3),
            torch.nn.MaxPool2d(kernel_size=2, stride=2),
        )

        # Stage 2: 512 -> 1024 bottleneck stack (no further downsampling).
        self.sequential2 = torch.nn.Sequential(
            *self._conv_block(512, 1024, 3),
            *self._conv_block(1024, 512, 1),
            *self._conv_block(512, 1024, 3),
            *self._conv_block(1024, 512, 1),
            *self._conv_block(512, 1024, 3),
            *self._conv_block(1024, 1024, 3),
            *self._conv_block(1024, 1024, 3),
        )

        # Stage 3: head — one more 3x3 block, then the 1x1 projection to n
        # channels. (The original code appended a second BatchNorm2d(n)
        # right after the BN+LeakyReLU on the same channels — a redundant
        # duplicate, removed here.)
        self.sequential3 = torch.nn.Sequential(
            *self._conv_block(1024, 1024, 3),
            *self._conv_block(1024, n, 1),
        )

    def forward(self, x):
        """Run the three stages in order; returns an (B, n, H/32, W/32) map."""
        x = self.sequential1(x)
        x = self.sequential2(x)
        return self.sequential3(x)
