from torch.optim import AdamW

from demo5 import MyModel
from demo4 import MyDataset
import torch
from torch.utils.data import DataLoader
from transformers import BertTokenizer

# Export tracing actually runs the model, so model and inputs must share a device.
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Local snapshot of the google-bert/bert-base-chinese tokenizer.
# NOTE: previously this path was loaded twice (as `token` and `tokenizer`);
# load once and alias for backward compatibility.
TOKENIZER_PATH = r"D:\pythonWork\python_demo\model\google-bert\bert-base-chinese\models--google-bert--bert-base-chinese\snapshots\c30a6ed22ab4564dc1e3b2ecbf6e766b0611a33f"
tokenizer = BertTokenizer.from_pretrained(TOKENIZER_PATH)
token = tokenizer  # backward-compat alias for the old duplicate load

model = MyModel().to(DEVICE)
# Load the best checkpoint. map_location makes the load work regardless of
# whether the checkpoint was saved from GPU or CPU.
model.load_state_dict(torch.load("params/1bert.pt", map_location=DEVICE))
model.eval()

text = "这是一个测试句子"
# truncation=True guards against inputs longer than max_length; padding to the
# fixed max_length matches the training-time input shape.
inputs = tokenizer(
    text,
    return_tensors="pt",
    padding="max_length",
    truncation=True,
    max_length=350,
)

# Dummy input for tracing. Each tensor has shape [1, 350] (batch of 1, padded
# to max_length) and must be moved to the model's device, otherwise export
# fails on a CUDA machine.
dummy_input = (
    inputs["input_ids"].to(DEVICE),
    inputs["attention_mask"].to(DEVICE),
    inputs["token_type_ids"].to(DEVICE),  # all zeros for single-segment input
)

# Export to ONNX with dynamic batch and sequence-length axes so the exported
# graph is not locked to the [1, 350] tracing shape.
torch.onnx.export(
    model,
    dummy_input,
    "bert_base_chinese.onnx",
    input_names=["input_ids", "attention_mask", "token_type_ids"],
    output_names=["last_hidden_state"],
    dynamic_axes={
        "input_ids": {0: "batch_size", 1: "sequence_length"},
        "attention_mask": {0: "batch_size", 1: "sequence_length"},
        "token_type_ids": {0: "batch_size", 1: "sequence_length"},
        "last_hidden_state": {0: "batch_size", 1: "sequence_length"}
    },
    opset_version=14  # BERT requires opset >= 11
)

# Structural validation of the exported graph.
import onnx
onnx_model = onnx.load("bert_base_chinese.onnx")
onnx.checker.check_model(onnx_model)

# # Optional parity check: compare PyTorch output against ONNX Runtime.
# with torch.no_grad():
#     pytorch_output = model(*dummy_input)  # may be a tuple or a tensor
#
# import onnxruntime as ort
# ort_session = ort.InferenceSession("bert_base_chinese.onnx")
# onnx_output = ort_session.run(None, {
#     "input_ids": dummy_input[0].cpu().numpy(),
#     "attention_mask": dummy_input[1].cpu().numpy(),
#     "token_type_ids": dummy_input[2].cpu().numpy()
# })
#
# print("PyTorch output type:", type(pytorch_output))
# print("Number of ONNX outputs:", len(onnx_output))