import torch
import torch.nn as nn
import torch.nn.functional as F

class MyNLLLoss(nn.Module):
    """Template-matching loss for masked-LM token vectors.

    For each token vector, computes a combined distance — (1 - cosine
    similarity) plus Euclidean distance — to every template vector, turns
    the negated distances into log-probabilities over templates with
    LogSoftmax, and scores them against ``data_label`` using NLLLoss
    (label 0 is treated as padding and ignored).
    """

    def __init__(self):
        super(MyNLLLoss, self).__init__()
        # NOTE: despite later variable names, LogSoftmax yields
        # *log*-probabilities, which is exactly what NLLLoss expects;
        # together they implement cross-entropy over the template axis.
        self.softmax = nn.LogSoftmax(dim=-1)
        self.criterion = nn.NLLLoss(ignore_index=0)

    def forward(self, mask_lm_output, template_vectors, data_label):
        """Compute the template-matching NLL loss.

        Args:
            mask_lm_output: float tensor [batch, seq_len, hidden]
                (e.g. [32, 512, 768]).
            template_vectors: float tensor [num_templates, hidden].
            data_label: long tensor [batch, seq_len] of template indices;
                index 0 is ignored (padding).

        Returns:
            Scalar loss tensor (mean over non-ignored positions).
        """
        batch_size, seq_len, hidden_size = mask_lm_output.shape
        num_templates, _ = template_vectors.shape

        # [batch*seq_len, 1, hidden] vs [1, num_templates, hidden]:
        # broadcasting replaces the original .repeat() calls, avoiding two
        # fully materialized [batch*seq_len, num_templates, hidden] copies.
        tokens = mask_lm_output.reshape(-1, hidden_size).unsqueeze(1)
        templates = template_vectors.unsqueeze(0)

        # Combined distance: cosine dissimilarity + Euclidean distance,
        # shape [batch*seq_len, num_templates].
        cosine_loss = 1 - F.cosine_similarity(tokens, templates, dim=-1)
        euclidean_loss = torch.norm(tokens - templates, dim=-1)
        distance = cosine_loss + euclidean_loss

        # Smaller distance -> larger log-probability for that template.
        log_probs = self.softmax(-distance)

        # NLLLoss wants [batch, num_classes, seq_len], so reshape and
        # move the template axis into the class position.
        log_probs = log_probs.view(batch_size, seq_len, num_templates)
        return self.criterion(log_probs.transpose(1, 2), data_label)