import torch
import torch.nn as nn
import torch.nn.functional as F

def byol_loss_func(p: torch.Tensor, z: torch.Tensor, simplified: bool = True) -> torch.Tensor:
    """Compute the BYOL loss ``2 - 2 * cos(p, z)``, averaged over the batch.

    Args:
        p: Predictor (student) output, feature dimension last.
        z: Target (teacher) projection, same shape as ``p``.
        simplified: If True, compute via ``F.cosine_similarity`` directly;
            otherwise normalize both inputs and take their dot product.
            The two paths are mathematically equivalent.

    Returns:
        Scalar tensor; 0 when ``p`` and ``z`` are perfectly aligned, up to 4
        when anti-aligned.
    """
    if simplified:
        return 2 - 2 * F.cosine_similarity(p, z, dim=-1).mean()

    p = F.normalize(p, dim=-1)
    z = F.normalize(z, dim=-1)

    # Sum over the last dim. The original hard-coded dim=1, which only worked
    # for 2-D inputs and disagreed with the simplified branch's dim=-1.
    return 2 - 2 * (p * z).sum(dim=-1).mean()

def compute_var(y: torch.Tensor) -> torch.Tensor:
    """Per-feature standard deviation of ``y``, flattened to (-1, features).

    Uses the unbiased (n-1) estimator via the sum / sum-of-squares identity,
    adding 1e-6 before the sqrt for numerical stability.

    Args:
        y: Tensor whose last dimension is the feature dimension; all leading
            dimensions are flattened into one sample dimension.

    Returns:
        1-D tensor of length ``y.size(-1)`` with the std of each feature.
    """
    y = y.view(-1, y.size(-1))
    # Keep the sample count on the same device/dtype as y. The original
    # called .cuda(), which crashed on CPU-only machines.
    zc = torch.tensor(y.size(0), dtype=y.dtype, device=y.device)
    zs = y.sum(dim=0)
    zss = (y ** 2).sum(dim=0)
    # Unbiased sample variance: sum(y^2)/(n-1) - (sum(y))^2 / (n*(n-1)).
    var = zss / (zc - 1) - (zs ** 2) / (zc * (zc - 1))
    return torch.sqrt(var + 1e-6)

class ByolLoss(nn.Module):
    """BYOL-style loss between student and teacher embeddings.

    Alongside the loss, reports the mean per-feature std of the normalized
    student and teacher embeddings — a common collapse-monitoring signal.
    """

    def __init__(self, symmetric):
        super().__init__()
        # When True, each input is assumed to hold two augmented views
        # concatenated along the batch dim; the loss is averaged over the
        # cross-view (teacher-view-i vs student-view-j, i != j) pairs.
        self.symmetric = symmetric

    def forward(self, student, teacher):
        # Collapse diagnostics, computed on the full (un-chunked) batches.
        std_frm_stu = compute_var(F.normalize(student, dim=-1)).mean()
        std_frm_tea = compute_var(F.normalize(teacher, dim=-1)).mean()

        if not self.symmetric:
            # Single directed term, teacher as the prediction target.
            return byol_loss_func(teacher, student), std_frm_stu, std_frm_tea

        stu_views = student.chunk(2)
        tea_views = teacher.chunk(2)
        loss_sum = 0
        pair_count = 0
        for t_idx, t_view in enumerate(tea_views):
            for s_idx, s_view in enumerate(stu_views):
                # Only compare across different views.
                if t_idx != s_idx:
                    loss_sum = loss_sum + byol_loss_func(t_view, s_view, simplified=False)
                    pair_count += 1
        return loss_sum / pair_count, std_frm_stu, std_frm_tea