import torch
import torch.nn as nn
import torch.nn.functional as F

def compute_var(y):
    """Return the per-dimension standard deviation of ``y``.

    Despite the name, this returns sqrt(var + 1e-6), i.e. a smoothed std,
    computed with the unbiased (n - 1) estimator via the sum / sum-of-squares
    identity: var = E[y^2]*n/(n-1) - (E[y])^2*n/(n-1).

    Args:
        y: tensor of shape [..., D]; all leading dims are flattened together.

    Returns:
        Tensor of shape [D] with the smoothed std of each feature dimension.
        NOTE: with a single row (n == 1) the divisors are zero, producing
        inf/nan — same as the original behavior.
    """
    y = y.view(-1, y.size(-1))
    # Use a plain Python int for the sample count instead of the original
    # torch.tensor(...).cuda(), which crashed on CPU-only machines and tied
    # the helper to a specific device for no benefit.
    n = y.size(0)
    zs = y.sum(dim=0)
    zss = (y ** 2).sum(dim=0)
    var = zss / (n - 1) - (zs ** 2) / (n * (n - 1))
    # 1e-6 keeps the sqrt finite when a dimension has (near-)zero variance.
    return torch.sqrt(var + 1e-6)

class TemporalContrastiveLoss(nn.Module):
    """Temporal contrastive loss between student and teacher frame features.

    For 3-D inputs [B, T, D], each student frame at time t is matched against
    a window of ``cnst_win`` teacher frames centered at t; the positive is the
    center of the window and the neighbors act as distractors. Also computes
    a BYOL-A style cosine loss, and ``forward`` additionally reports the mean
    feature std of both streams as a collapse diagnostic.
    """

    def __init__(self, cnst_win=11, temperature=0.1):
        """
        Args:
            cnst_win: odd temporal window size; the positive target sits at
                the window center.
            temperature: softmax temperature applied to the similarity logits.
        """
        super(TemporalContrastiveLoss, self).__init__()
        self.temperature = temperature
        self.cnst_win = cnst_win
        assert self.cnst_win % 2 == 1, "window size must be odd"

    def forward(self, stu_frm, tea_frm, mask=None):
        """Compute the symmetric cross-half contrastive loss.

        Args:
            stu_frm: student features; first dim is split in two halves.
            tea_frm: teacher features, same leading shape as ``stu_frm``.
            mask: unsupported; must be None.

        Returns:
            (contrastive loss, student feature std, teacher feature std,
             BYOL-A loss) — all scalar tensors.

        Raises:
            NotImplementedError: if ``mask`` is not None.
        """
        # Collapse diagnostic: mean per-dimension std of the L2-normalized
        # features (values near zero indicate representation collapse).
        std_frm_stu = compute_var(F.normalize(stu_frm, dim=-1)).mean()
        std_frm_tea = compute_var(F.normalize(tea_frm, dim=-1)).mean()
        if mask is not None:
            raise NotImplementedError("mask not implemented")

        # Split along the leading dim and cross the halves symmetrically:
        # teacher half i is predicted from student half j, i != j.
        stu_halves = stu_frm.chunk(2)
        tea_halves = tea_frm.chunk(2)
        total_loss_frm = 0
        byola_loss_frm = 0
        n_loss_terms = 0
        for iq, q in enumerate(tea_halves):
            for iv, v in enumerate(stu_halves):
                if iq == iv:
                    continue
                loss, byola_loss = self.contrastive_loss(q, v)
                total_loss_frm += loss
                byola_loss_frm += byola_loss
                n_loss_terms += 1
        # NOTE(review): a leading dim of 1 makes chunk(2) yield one chunk,
        # leaving n_loss_terms == 0 and dividing by zero below — callers are
        # presumably expected to pass at least 2 along the first dim.
        total_loss_frm /= n_loss_terms
        byola_loss_frm /= n_loss_terms
        return total_loss_frm, std_frm_stu, std_frm_tea, byola_loss_frm

    def contrastive_loss(self, pred, tar):
        """Compute (temporal contrastive loss, BYOL-A loss) for one pair.

        Args:
            pred: predictions, [B*T, D] (2-D) or [B, T, D] (3-D).
            tar: targets with the same shape as ``pred``.

        Returns:
            (contrastive loss, byola loss). For 2-D inputs there is no
            temporal structure, so the contrastive term is 0.

        Raises:
            ValueError: if the input is neither 2-D nor 3-D (the original
            fell through both branches and died with an UnboundLocalError).
        """
        if pred.dim() == 2:
            pred = F.normalize(pred, dim=-1)    # [B * T, D]
            tar = F.normalize(tar, dim=-1)      # [B * T, D]
            # BYOL-A loss: 2 - 2*cos_sim, averaged over frames.
            byola_loss = 2 - 2 * (pred * tar).sum(dim=1).mean()
            return 0, byola_loss
        elif pred.dim() == 3:
            B, T, D = pred.shape
            pred = F.normalize(pred, dim=-1)    # [B, T, D]
            tar = F.normalize(tar, dim=-1)      # [B, T, D]
            # BYOL-A loss over all frames.
            byola_loss = 2 - 2 * (pred.reshape(-1, D) * tar.reshape(-1, D)).sum(dim=1).mean()
            # Zero-pad so every time step has a full window of candidates
            # (boundary frames see zero vectors with similarity 0).
            half = self.cnst_win // 2
            tar = tar.transpose(1, 2)                # [B, D, T]
            tar = F.pad(tar, (half, half))           # [B, D, T + W - 1]
            # Unfold into per-step candidate windows.
            tar = tar.unfold(2, self.cnst_win, 1)    # [B, D, T, W]
            assert tar.shape == (B, D, T, self.cnst_win), "wrong shape after unfold"

            # Cosine similarity of each prediction against its window.
            pred = pred.unsqueeze(2)                 # [B, T, 1, D]
            tar = tar.permute(0, 2, 3, 1)            # [B, T, W, D]
            logits = (pred * tar).sum(-1)            # [B, T, W]
            logits = (logits / self.temperature).reshape(-1, self.cnst_win)  # [B*T, W]
            # The positive (same-time-step teacher frame) is the window center.
            labels = torch.full((logits.size(0),), half, dtype=torch.long,
                                device=logits.device)
            return F.cross_entropy(logits, labels), byola_loss
        else:
            raise ValueError(
                "expected 2-D or 3-D input, got {}-D".format(pred.dim()))