import torch
import torch.nn as nn
import torch.nn.functional as F


class CosineClf(nn.Module):
    """Cosine-similarity classifier head.

    Inputs and class weight vectors are L2-normalized, so the raw scores are
    cosine similarities; an optional per-dimension temperature rescales the
    normalized weights, and an optional LayerNorm is applied to the input
    embeddings first.
    """

    def __init__(self, embed_dim, nb_classes, **fc_kwargs):
        """
        Args:
            embed_dim: dimensionality D of the input embeddings.
            nb_classes: number of classes C.
            **fc_kwargs: configuration flags:
                fc_temperture (bool): if True, the per-dim temperature is
                    learnable. (Historical misspelling kept for backward
                    compatibility; the correct spelling "fc_temperature" is
                    also accepted. Defaults to False when neither is given.)
                with_norm (bool): if True, apply LayerNorm to inputs before
                    normalization. Defaults to False.
        """
        super(CosineClf, self).__init__()
        self.nb_classes = nb_classes
        self.embed_dim = embed_dim
        self.weight = nn.Parameter(torch.randn(self.nb_classes, self.embed_dim))

        # Accept the historical misspelled key first so existing callers keep
        # working; fall back to the correct spelling, then to False.
        learn_temp = fc_kwargs.get(
            "fc_temperture", fc_kwargs.get("fc_temperature", False)
        )
        # The two original branches built the exact same ones-vector and
        # differed only in requires_grad — collapse them into one construction.
        self.temperature = nn.Parameter(
            torch.ones(self.embed_dim).float(), requires_grad=bool(learn_temp)
        )

        self.with_norm = fc_kwargs.get("with_norm", False)
        if self.with_norm:
            self.norm = nn.LayerNorm(self.embed_dim)
            self.norm.reset_parameters()

        self.__init_weights()

    def __init_weights(self):
        # Truncated-normal init for the class weights; the temperature starts
        # (and, when frozen, stays) at ones.
        nn.init.trunc_normal_(self.weight, std=0.02)
        if self.temperature.requires_grad:
            nn.init.ones_(self.temperature)

    def forward(self, logits):
        """Compute temperature-scaled cosine similarity to each class weight.

        Args:
            logits: [B, D] input embeddings (D == embed_dim).

        Returns:
            dict with key "logits": [B, C] similarity scores, where
            self.weight is [C, D] and self.temperature is [D].
        """
        if self.with_norm:
            logits = self.norm(logits)

        # Normalize both sides so F.linear yields cosine similarities,
        # per-dimension rescaled by the temperature.
        logits = F.normalize(logits, p=2, dim=-1)
        weight = F.normalize(self.weight, p=2, dim=-1) * self.temperature
        dists = F.linear(logits, weight)
        out = {"logits": dists}
        return out


class LinearClf(nn.Module):
    """Plain linear classifier head with optional bias and input LayerNorm."""

    def __init__(self, embed_dim, nb_classes, **fc_kwargs):
        """
        Args:
            embed_dim: dimensionality D of the input embeddings.
            nb_classes: number of classes C.
            **fc_kwargs: configuration flags:
                fc_bias (bool): if True, add a learnable per-class bias.
                    Defaults to False when absent.
                with_norm (bool): if True, apply LayerNorm to inputs before
                    the projection. Defaults to False.
        """
        super(LinearClf, self).__init__()
        self.nb_classes = nb_classes
        self.embed_dim = embed_dim
        self.with_norm = fc_kwargs.get("with_norm", False)
        self.weight = nn.Parameter(torch.randn(self.nb_classes, self.embed_dim))

        if fc_kwargs.get("fc_bias", False):
            self.bias = nn.Parameter(torch.zeros(self.nb_classes))
        else:
            # BUG FIX: originally self.bias was never created on this path,
            # so __init_weights and forward crashed with AttributeError.
            # Registering None keeps F.linear and state_dict handling correct.
            self.register_parameter("bias", None)
        if self.with_norm:
            self.norm = nn.LayerNorm(self.embed_dim)
            self.norm.reset_parameters()

        self.__init_weights()

    def __init_weights(self):
        nn.init.trunc_normal_(self.weight, std=0.02)
        if self.bias is not None:  # bias is optional
            nn.init.constant_(self.bias, 0)

    def forward(self, logits):
        """Project embeddings to per-class scores.

        Args:
            logits: [B, D] input embeddings (D == embed_dim).

        Returns:
            dict with key "logits": [B, C] linear scores, where
            self.weight is [C, D].
        """
        if self.with_norm:
            logits = self.norm(logits)
        # F.linear accepts bias=None, so both configurations share this path.
        dists = F.linear(logits, self.weight, self.bias)
        out = {"logits": dists}
        return out
