import torch
import torch.nn as nn
import torch.nn.functional as F

def compute_var(y):
    """Return the per-feature standard deviation of *y*.

    NOTE: despite the name, this returns the Bessel-corrected standard
    deviation (sqrt of the unbiased variance), not the variance itself.
    A small epsilon (1e-6) is added before the square root for
    numerical stability.

    Args:
        y: tensor whose last dimension is the feature dimension; all
            leading dimensions are flattened into a single sample axis.

    Returns:
        1-D tensor of shape ``(y.size(-1),)`` with the std of each
        feature dimension (values are NaN/inf when fewer than two
        samples are provided, matching the original formula).
    """
    y = y.view(-1, y.size(-1))
    # Sample count as a plain Python int. The original wrapped it in
    # torch.tensor(...).cuda(), which crashed on CPU-only machines and
    # forced a host->device copy even when y already lived on the GPU.
    n = y.size(0)
    s = y.sum(dim=0)
    ss = (y ** 2).sum(dim=0)
    # One-pass unbiased variance: E[x^2] and E[x]^2 terms with
    # Bessel's correction (divide by n - 1).
    var = ss / (n - 1) - (s ** 2) / (n * (n - 1))
    return torch.sqrt(var + 1e-6)
    
    
class BYOLLoss(nn.Module):
    """BYOL-style symmetric regression loss over two views per batch.

    The incoming batch is assumed to contain two augmented views stacked
    along dim 0 (first half = view 1, second half = view 2).  The loss is
    the mean cosine-distance term ``2 - 2 * cos(pred, target)`` averaged
    over the two cross-view (teacher-chunk, student-chunk) pairings.

    Also reports the mean per-dimension std of the L2-normalized student
    and teacher features (a collapse indicator), via the module-level
    ``compute_var`` helper.
    """

    def __init__(self, temperature=1.0):
        super().__init__()
        # NOTE: temperature is currently unused by the loss computation;
        # kept so existing callers passing it keep working.
        self.temperature = temperature

    def forward(self, stu_frm, tea_frm):
        """Compute collapse diagnostics and the symmetric BYOL loss.

        Args:
            stu_frm: student features, shape ``(2 * B, D)`` — two views
                concatenated along dim 0.
            tea_frm: teacher features, same shape as ``stu_frm``.

        Returns:
            Tuple ``(std_frm_stu, std_frm_tea, byola_loss_frm)`` of
            scalar tensors: mean normalized-feature std for student and
            teacher, and the averaged cross-view BYOL loss.
        """
        assert stu_frm.shape == tea_frm.shape, "student and teacher features must have the same shape"
        # chunk(2) needs an even batch so both chunks have equal size;
        # otherwise the elementwise product in byol_loss fails (and a
        # batch of 1 would yield a single chunk and zero loss terms).
        assert stu_frm.size(0) >= 2 and stu_frm.size(0) % 2 == 0, \
            "batch dimension must be even (two stacked views)"
        # Collapse indicators: std of L2-normalized features.
        std_frm_stu = compute_var(F.normalize(stu_frm, dim=-1)).mean()
        std_frm_tea = compute_var(F.normalize(tea_frm, dim=-1)).mean()
        stu_chunks = stu_frm.chunk(2)
        tea_chunks = tea_frm.chunk(2)
        n_loss_terms = 0
        byola_loss_frm = 0
        # Cross-view pairing only: teacher view i vs student view j, i != j.
        for iq, q in enumerate(tea_chunks):
            for iv, v in enumerate(stu_chunks):
                if iq == iv:
                    continue
                byola_loss_frm += self.byol_loss(q, v)
                n_loss_terms += 1
        byola_loss_frm /= n_loss_terms

        return std_frm_stu, std_frm_tea, byola_loss_frm

    def byol_loss(self, pred, tar):
        """Mean BYOL regression term: ``2 - 2 * cos(pred, tar)``.

        Both inputs are L2-normalized along the feature dim, so the
        result is in ``[0, 4]`` and is 0 when the rows are identical
        up to positive scaling.
        """
        pred = F.normalize(pred, dim=-1)    # [B * T, D]
        tar = F.normalize(tar, dim=-1)      # [B * T, D]
        byola_loss = 2 - 2 * (pred * tar).sum(dim=1).mean()
        return byola_loss