import torch
import torch.nn as nn
import torch.nn.functional as F

class SemanticAlignmentLoss(nn.Module):
    """InfoNCE-style alignment loss between encoder (pre-quantization) and
    decoder (post-quantization) token features.

    For each sample, token position ``l`` of the pre-quant sequence is the
    positive match for position ``l`` of the post-quant sequence; every other
    position in the same sample acts as a negative.
    """

    def __init__(self, temperature=0.07):
        super().__init__()
        # Softmax temperature: smaller values sharpen the similarity logits.
        self.temp = temperature

    def forward(self, pre_quant, post_quant):
        """Compute the alignment loss.

        Args:
            pre_quant:  encoder features, shape [B*K, L, D].
            post_quant: decoder features, shape [B*K, L, D].

        Returns:
            Scalar cross-entropy over per-position similarity distributions.
        """
        n_seqs, seq_length = pre_quant.shape[0], pre_quant.shape[1]

        # Cosine similarity between every (pre, post) position pair,
        # scaled by the temperature.  Result: [B*K, L, L].
        logits = torch.einsum(
            'bld,bmd->blm',
            F.normalize(pre_quant, dim=-1),
            F.normalize(post_quant, dim=-1),
        ) / self.temp

        # Flatten so each pre-position becomes one classification problem
        # over the L post-positions: [B*K*L, L].
        logits = logits.reshape(-1, seq_length)

        # Ground truth: position l matches position l, tiled per sequence.
        targets = torch.arange(seq_length, device=pre_quant.device).repeat(n_seqs)

        return F.cross_entropy(logits, targets)