from torchviz import make_dot
import torch
from transformers import BertTokenizer
from optimized_finetune import BertForDualRelation, preprocess_dual_relation

# Load the fine-tuned model and tokenizer.
# BertForDualRelation is a project-local class; assumes the checkpoint
# directory "./optimized_dual_relation_bert" exists locally — TODO confirm.
model = BertForDualRelation.from_pretrained("./optimized_dual_relation_bert")
tokenizer = BertTokenizer.from_pretrained("bert-base-chinese")
# Prefer GPU when available; fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model.to(device)

# Build a realistic test input. The label fields (rel1_score / rel2_score)
# must be present, otherwise the model will not compute a loss.
sample_text = dict(
    hobby="爱好是古建筑绘画",
    attraction="北京故宫博物院是中国明清两代的皇家宫殿",
    type="历史古迹",
    rel1_score=1.0,
    rel2_score=1.0,
)

# Preprocess the sample into model features.
# preprocess_dual_relation is project-local; presumably it returns a dict of
# per-feature lists (token ids, attention mask, labels, ...) — TODO confirm
# its output schema against optimized_finetune.
inputs = preprocess_dual_relation(sample_text, tokenizer)
# Wrap each feature with a batch dimension of 1 and move it to the target device.
input_tensors = {k: torch.tensor([v]).to(device) for k, v in inputs.items()}

# Run a forward pass with gradients enabled so make_dot can trace the
# autograd graph. Labels must be present in the inputs for a loss to be
# computed at all.
# NOTE(review): model.train() also activates dropout, making the loss value
# nondeterministic; model.eval() combined with torch.enable_grad() would
# still build the graph — confirm train mode is actually required here.
model.train()  # switch to training mode
with torch.enable_grad():
    output = model(**input_tensors)
    loss = output[0]  # assumes the loss is the first element of the model output — verify against BertForDualRelation.forward

# Visualize the backward computation graph of the loss and render it to
# "dual_relation_model.png" (cleanup=True removes the intermediate DOT file).
# NOTE(review): when labels are missing, HF-style tuple outputs usually put
# logits (not None) at index 0, so this None guard may never trigger —
# confirm the model's return convention.
if loss is not None:
    loss_graph = make_dot(loss, params=dict(model.named_parameters()))
    loss_graph.render("dual_relation_model", format="png", cleanup=True)
else:
    print("无法生成计算图：损失值为None，请检查输入是否包含标签")