import torch
import torch.nn as nn

from .backbone import Backbone


class Head(nn.Module):
    """Base class for detection heads.

    Subclasses are expected to assign a concrete ``nn.Module`` to
    ``self.head`` in their ``__init__``; ``forward`` simply delegates to it.
    """

    def __init__(self) -> None:
        super().__init__()
        # Declared (not assigned) here: concrete subclasses must provide it.
        self.head: nn.Module

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Run the input through the subclass-provided head module."""
        head_module = self.head
        return head_module(x)


class YoloHead(Head):
    """YOLOv1-style detection head.

    Maps backbone features to a ``(N, grid_size, grid_size,
    5 * bounding_box_num + class_num)`` tensor: the first
    ``5 * bounding_box_num`` channels (box coordinates + confidences) are
    squashed with a sigmoid, the remaining ``class_num`` channels are a
    softmax class distribution per grid cell.
    """

    def __init__(
        self, backbone: Backbone, grid_size: int, bounding_box_num: int, class_num: int
    ) -> None:
        """Build the conv + fully-connected head.

        Args:
            backbone: Feature extractor; only ``feature_channels`` is read
                here to size the first conv.
            grid_size: Number of grid cells per spatial side of the output.
            bounding_box_num: Boxes predicted per grid cell (5 values each:
                4 coordinates + 1 confidence).
            class_num: Number of object classes.
        """
        super().__init__()
        self.bounding_box_num = bounding_box_num
        self.class_num = class_num
        cell_depth = 5 * bounding_box_num + class_num
        self.head = nn.Sequential(
            nn.Conv2d(backbone.feature_channels, 1024, kernel_size=3, padding=1),
            nn.LeakyReLU(0.1),
            # Stride-2 conv halves the spatial resolution.
            nn.Conv2d(1024, 1024, kernel_size=3, padding=1, stride=2),
            nn.LeakyReLU(0.1),
            nn.Conv2d(1024, 1024, kernel_size=3, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(1024, 1024, kernel_size=3, padding=1),
            nn.LeakyReLU(0.1),
            nn.Flatten(),
            # NOTE(review): hard-coded 7*7 assumes the backbone emits a
            # 14x14 feature map (halved to 7x7 by the stride-2 conv above),
            # independent of `grid_size` — confirm against the backbone /
            # expected input resolution.
            nn.Linear(7 * 7 * 1024, 4096),
            nn.LeakyReLU(0.1),
            nn.Dropout(0.5),
            nn.Linear(4096, grid_size * grid_size * cell_depth),
            nn.Unflatten(1, (grid_size, grid_size, cell_depth)),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Predict boxes and class probabilities for each grid cell.

        Returns a tensor of shape
        ``(N, grid_size, grid_size, 5 * bounding_box_num + class_num)``.
        """
        raw = self.head(x)
        split = 5 * self.bounding_box_num
        # Build the activated output out-of-place instead of mutating `raw`
        # with sliced assignments: `raw` is the output of a view op
        # (nn.Unflatten), and in-place writes on autograd-tracked views are
        # fragile (version-counter errors, double-backward / torch.compile
        # interactions).
        boxes = torch.sigmoid(raw[..., :split])
        classes = torch.softmax(raw[..., split:], dim=-1)
        return torch.cat((boxes, classes), dim=-1)
