import torch.nn
import torch.nn as nn
import torch.nn.functional as F


class LeNet5(nn.Module):
    """LeNet-5-style CNN for single-channel 28x28 inputs, 10 output classes.

    Two conv/ReLU/max-pool stages (`self.conv`) feed a three-layer MLP head
    with dropout (`self.fc`).
    """

    def __init__(self, ):
        super(LeNet5, self).__init__()
        # Feature extractor: 1x28x28 -> 6x14x14 -> 16x5x5.
        conv_stages = [
            nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5, padding=2),
            nn.ReLU(),
            nn.MaxPool2d(2, 2),
            nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5),
            nn.ReLU(),
            nn.MaxPool2d(2, 2),
        ]
        self.conv = nn.Sequential(*conv_stages)
        # Classifier head: 16*5*5 -> 128 -> 256 -> 10, dropout after each ReLU.
        head = [
            nn.Flatten(),
            nn.Linear(in_features=16 * 5 * 5, out_features=128),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Linear(in_features=128, out_features=256),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Linear(in_features=256, out_features=10),
        ]
        self.fc = nn.Sequential(*head)

    def forward(self, img, labels=None):
        """Return logits, or ``(mean cross-entropy loss, logits)`` when
        ``labels`` is provided."""
        features = self.conv(img)
        logits = self.fc(features)
        if labels is None:
            return logits
        criterion = nn.CrossEntropyLoss(reduction='mean')
        return criterion(logits, labels), logits


class StudentWithoutDistilling(nn.Module):
    """Baseline MLP student trained only on hard labels (no distillation).

    A single 512-unit hidden layer with ReLU and dropout; inputs are
    flattened, so any ``(batch, *)`` shape totalling ``input_node`` works.
    """

    def __init__(self, input_node=28 * 28, output_node=10):
        super(StudentWithoutDistilling, self).__init__()
        self.net = nn.Sequential(
            nn.Flatten(),
            nn.Linear(input_node, 512),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Linear(512, output_node),
        )

    def forward(self, img, labels=None):
        """Return logits, or ``(mean cross-entropy loss, logits)`` when
        ``labels`` is provided."""
        logits = self.net(img)
        if labels is None:
            return logits
        criterion = nn.CrossEntropyLoss(reduction='mean')
        return criterion(logits, labels), logits


class StudentWithDistilling(nn.Module):
    """MLP student trained with knowledge distillation (Hinton et al., 2015).

    The training loss blends a soft term (KL divergence between the
    temperature-scaled student and teacher distributions) with a hard term
    (cross-entropy against ground-truth labels):

        loss = alpha * T^2 * KL(student_T || teacher_T)
               + (1 - alpha) * CE(logits, hard_labels)

    The ``T^2`` factor keeps the soft-term gradient magnitude roughly
    independent of the temperature.
    """

    def __init__(self, input_node=28 * 28, output_node=10, temperature=2., alpha=0.5):
        super(StudentWithDistilling, self).__init__()
        self.net = nn.Sequential(nn.Flatten(),
                                 nn.Linear(input_node, 512),
                                 nn.ReLU(),
                                 nn.Dropout(0.5),
                                 nn.Linear(512, output_node))
        self.temperature = temperature  # softening temperature T (> 0)
        self.alpha = alpha              # weight of the soft (distillation) term

    def forward(self, img, soft_logits=None, hard_labels=None):
        """Return logits, or ``(distillation loss, logits)`` when both the
        teacher's ``soft_logits`` and ground-truth ``hard_labels`` are given.
        """
        logits = self.net(img)
        if soft_logits is None or hard_labels is None:
            return logits
        # BUG FIX: F.kl_div expects *log*-probabilities as its first argument.
        # The previous code passed plain softmax probabilities, which computes
        # a wrong divergence (non-zero even for identical distributions).
        loss_soft = F.kl_div(F.log_softmax(logits / self.temperature, dim=-1),
                             F.softmax(soft_logits / self.temperature, dim=-1),
                             reduction='batchmean')
        loss_hard = F.cross_entropy(logits, hard_labels)
        loss = self.alpha * loss_soft * self.temperature ** 2 + (1 - self.alpha) * loss_hard
        return loss, logits
