import os
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
import onnx
import onnxruntime as ort
import numpy as np


def validate_onnx_model(model_path, model_name, text):
    """Run an exported ONNX causal-LM on *text* and decode its argmax predictions.

    Loads only the tokenizer from *model_name* (the original torch model
    comparison was removed from this validation path), feeds the tokenized
    prompt to the ONNX model via onnxruntime, and greedily decodes the
    logits of the first output.

    Args:
        model_path: Path to the exported ``.onnx`` model file.
        model_name: HF model directory/name; used only to load the tokenizer.
        text: Prompt string to tokenize and feed to the model.

    Returns:
        The decoded string obtained from ``argmax`` over the logits
        (first item of the batch).
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
    print("model loaded")

    # Tokenize once; ONNX Runtime expects plain numpy int64 arrays,
    # so convert the torch tensors produced by the tokenizer.
    inputs = tokenizer(text, return_tensors="pt")
    ort_inputs = {
        "input_ids": inputs["input_ids"].cpu().numpy().astype(np.int64),
        "attention_mask": inputs["attention_mask"].cpu().numpy().astype(np.int64),
    }

    # Create the session exactly once (the model can be very large).
    print("load onnx")
    ort_session = ort.InferenceSession(model_path)
    print("run onnx")
    ort_outputs = ort_session.run(None, ort_inputs)

    # assumes output 0 is the logits tensor (batch, seq, vocab) — TODO confirm
    # against the export script's declared output order.
    logits = ort_outputs[0]
    print(logits)

    # Greedy decode: argmax over the vocabulary dimension, then detokenize.
    predicted_token_ids = np.argmax(logits, axis=-1)
    decoded_text = tokenizer.batch_decode(
        predicted_token_ids,
        skip_special_tokens=True,
        clean_up_tokenization_spaces=True,
    )[0]  # take the first result in the batch

    print("解码结果:", decoded_text)
    return decoded_text

if __name__ == "__main__":
    # Local TeleChat-12B checkpoint (tokenizer source) and its ONNX export.
    checkpoint_dir = "/home/icca/users/general/race/telechat/12B"
    exported_onnx = "12B.onnx"
    validate_onnx_model(exported_onnx, checkpoint_dir, "你好")