# validate_output.py
import torch
from transformers import BertModel, BertTokenizer
import torch.nn.functional as F


def validate_output(output_tensor, input_text, model_name="bert-base-uncased", device="cpu"):
    """
    Validate that a distributed-system output matches a reference BERT model.

    Runs `input_text` through a stock Hugging Face BERT model and compares
    its last hidden state against `output_tensor` produced by the Voltage
    inference system.

    Args:
        output_tensor (Tensor): Output from the Voltage inference system,
            shape [seq_len, hidden_dim]. May live on any device/dtype; it is
            moved to CPU float32 before comparison.
        input_text (str): Input text used to reproduce the reference output.
        model_name (str): Hugging Face model name for the reference BERT.
        device (str): Device to run the reference model on ("cpu" or "cuda").

    Returns:
        dict: Validation metrics — mean per-token cosine similarity, MSE,
        L1 difference, and the compared shape.
    """
    tokenizer = BertTokenizer.from_pretrained(model_name)
    model = BertModel.from_pretrained(model_name).to(device)
    model.eval()

    with torch.no_grad():
        inputs = tokenizer(
            input_text,
            return_tensors="pt",
            padding="max_length",
            truncation=True,
            max_length=output_tensor.shape[0],
        )
        input_ids = inputs["input_ids"].to(device)
        # BUG FIX: forward the attention mask. Without it, padding tokens are
        # attended to, so the reference hidden states are wrong whenever the
        # text is shorter than max_length — the validation baseline itself
        # would be corrupted.
        attention_mask = inputs["attention_mask"].to(device)
        outputs = model(input_ids, attention_mask=attention_mask)
        ref_output = outputs.last_hidden_state.squeeze(0).to("cpu")  # [seq_len, hidden_dim]

    # BUG FIX: the system output may arrive on an accelerator and/or in half
    # precision; move it to CPU float32 so the metric computations below do
    # not fail on a device/dtype mismatch against the CPU reference.
    output_tensor = output_tensor.detach().to("cpu").float()
    ref_output = ref_output.float()

    # Align lengths (the Voltage output may be shorter than max_length).
    min_len = min(ref_output.shape[0], output_tensor.shape[0])
    output_tensor = output_tensor[:min_len]
    ref_output = ref_output[:min_len]

    # Per-token cosine similarity averaged over the sequence, plus elementwise
    # error metrics over the aligned region.
    cos_sim = F.cosine_similarity(output_tensor, ref_output, dim=1).mean().item()
    mse = F.mse_loss(output_tensor, ref_output).item()
    l1 = F.l1_loss(output_tensor, ref_output).item()

    return {
        "cosine_similarity": cos_sim,
        "mse": mse,
        "l1": l1,
        "shape": output_tensor.shape,
    }
