import torch
import torch.nn.functional as F

from torch import nn


class GlobalContrastiveLoss(nn.Module):
    """
    Symmetric cross-modal contrastive loss in the style of CLIP (InfoNCE).

    Both embedding batches are L2-normalized and a pairwise cosine-similarity
    matrix between the two modalities is computed. Matching rows (the diagonal)
    are treated as positives; cross-entropy is applied along both axes and the
    two directional losses are blended with ``alpha``.

    NOTE(review): unlike the original CLIP paper, the temperature here is a
    fixed hyperparameter, not a learnable log-parameterized/clipped scalar.
    """
    def __init__(self, temperature: float = 0.1, alpha: float = 0.5) -> None:
        """
        Args:
            temperature: Softmax temperature dividing the similarity logits.
                Must be strictly positive; smaller values sharpen the
                distribution.
            alpha: Weight of the z1->z2 direction; the z2->z1 direction gets
                ``1 - alpha``. ``alpha=0.5`` is the symmetric CLIP loss.

        Raises:
            ValueError: If ``temperature`` is not strictly positive
                (``temperature=0`` would otherwise yield inf/nan logits).
        """
        super().__init__()
        if temperature <= 0:
            raise ValueError(f"temperature must be > 0, got {temperature}")
        self.temperature = temperature
        self.alpha = alpha
        self.cross_entropy = nn.CrossEntropyLoss(reduction='mean')

    def forward(self, z1: torch.Tensor, z2: torch.Tensor) -> torch.Tensor:
        """
        Args:
            z1: Embeddings of modality one, shape ``(batch, dim)``.
            z2: Embeddings of modality two, shape ``(batch, dim)``; row ``i``
                of ``z2`` is the positive pair for row ``i`` of ``z1``.

        Returns:
            Scalar tensor: the alpha-weighted sum of both directional
            cross-entropy losses.
        """
        z1 = F.normalize(z1, p=2, dim=1)
        z2 = F.normalize(z2, p=2, dim=1)

        # Cosine similarity of every cross-modal pair, sharpened by temperature.
        logits = torch.matmul(z1, z2.T) / self.temperature
        # Positives sit on the diagonal; arange with an int endpoint is already
        # int64, so no .long() cast or post-hoc .to(device) copy is needed.
        labels = torch.arange(logits.shape[0], device=logits.device)

        loss_z1 = self.cross_entropy(logits, labels)      # z1 -> z2 direction
        loss_z2 = self.cross_entropy(logits.T, labels)    # z2 -> z1 direction

        return self.alpha * loss_z1 + (1.0 - self.alpha) * loss_z2
