import torch
import torch.nn as nn

class Loss(nn.Module):
    """Base class for loss functions that need access to embeddings.

    Subclasses must override ``forward``. The base ``forward`` registers
    ``preds`` as the embeddings (so ``compute_var`` can use them) and then
    raises ``NotImplementedError``.
    """

    def __init__(self):
        super().__init__()
        # Most recently registered embeddings; set via register_embeddings().
        self.embeddings: torch.Tensor | None = None

    def register_embeddings(self, embeddings: torch.Tensor):
        """
        Register embeddings for the loss function.
        This method can be overridden by subclasses if needed.
        """
        self.embeddings = embeddings

    def forward(self, preds: torch.Tensor, targets: torch.Tensor):
        # Register embeddings first so subclass machinery relying on the
        # base call still records them before the abstract-method error.
        self.register_embeddings(embeddings=preds)
        raise NotImplementedError("Loss function must implement the forward method.")

    def compute_var(self, y: torch.Tensor):
        """Return the smoothed per-dimension standard deviation of the
        registered embeddings.

        Note: despite the name, this returns ``sqrt(var + 1e-6)`` — the
        (unbiased) standard deviation with an epsilon for numerical
        stability, not the variance itself.

        Args:
            y: Only ``y.size(-1)`` is used — it fixes the feature dimension
               the registered embeddings are reshaped to.
               NOTE(review): presumably ``y`` and the embeddings share the
               same last-dim size — confirm against callers.

        Raises:
            ValueError: if no embeddings were registered, or if fewer than
                two embedding rows exist (variance undefined).
        """
        if self.embeddings is None:
            raise ValueError("Embeddings must be registered before computing variance.")
        # Flatten all leading dims into rows of feature vectors. Use a new
        # local name instead of clobbering the parameter `y`.
        z = self.embeddings.view(-1, y.size(-1))
        # Plain Python int for the sample count. The original
        # torch.tensor(...).cuda() hard-coded CUDA (crashing on CPU-only
        # machines) and forced an unnecessary device transfer.
        n = z.size(0)
        if n < 2:
            # Original silently divided by zero here, producing inf/nan.
            raise ValueError("Need at least two embedding rows to compute variance.")
        zs = z.sum(dim=0)
        zss = (z ** 2).sum(dim=0)
        # Unbiased sample variance via the sum-of-squares decomposition:
        # var = (sum x^2 - (sum x)^2 / n) / (n - 1).
        var = zss / (n - 1) - (zs ** 2) / (n * (n - 1))
        # Epsilon keeps sqrt (and its gradient) finite when var ~ 0.
        return torch.sqrt(var + 1e-6)