import os
import torch
import torch.onnx
from torchinfo import summary


def export_onnx(model, setting, device, batch_size=128, seq_len=100, enc_in=38):
    """Export a trained checkpoint to ONNX.

    Loads ``checkpoint.pth`` from ``./checkpoints/<setting>/``, runs one
    dummy forward pass to discover the model's output signature, then
    writes ``model.onnx`` into the same directory.

    Args:
        model: nn.Module instance whose weights will be loaded from the
            checkpoint before export.
        setting: experiment folder name under ``./checkpoints/``.
        device: torch device the dummy input (and model) are placed on.
        batch_size: batch dimension of the dummy encoder input.
        seq_len: sequence-length dimension of the dummy encoder input.
        enc_in: feature dimension of the dummy encoder input.
            (Defaults reproduce the previously hard-coded 128 x 100 x 38;
            presumably these match the training config — confirm with caller.)

    Returns:
        None. Side effects only: reads the checkpoint file and writes the
        ONNX file; export failures are caught and printed, not raised.
    """
    print('loading model')
    ckpt_dir = os.path.join('./checkpoints', setting)
    # map_location keeps this working when the checkpoint was saved on a
    # different device (e.g. a GPU checkpoint loaded on a CPU-only host).
    model.load_state_dict(
        torch.load(os.path.join(ckpt_dir, 'checkpoint.pth'), map_location=device)
    )
    # Ensure model parameters live on the same device as the dummy input;
    # previously the forward pass failed if the model was not already there.
    model.to(device)
    model.eval()

    dummy_inputs = torch.randn(batch_size, seq_len, enc_in, device=device).float()

    # Probe once to learn whether the model returns (output, enc_out) or a
    # single tensor; no_grad avoids building an unused autograd graph.
    with torch.no_grad():
        out = model(dummy_inputs, None, None, None)
    if isinstance(out, tuple):
        out = out[0]
        output_names = ['output', 'enc_out']
    else:
        output_names = ['output']

    print(out.shape)

    print("正在转换为ONNX格式...")
    onnx_output_path = os.path.join(ckpt_dir, 'model.onnx')

    # Export the ONNX model; only x_enc carries data, the remaining three
    # inputs are passed as None to match the model's forward signature.
    try:
        with torch.no_grad():
            torch.onnx.export(
                model,
                (dummy_inputs, None, None, None),
                onnx_output_path,
                export_params=True,
                do_constant_folding=True,
                input_names=['x_enc', 'x_mark_enc', 'x_dec', 'x_mark_dec'],
                output_names=output_names,
                verbose=True,
            )
        print(f"ONNX模型已成功保存到: {onnx_output_path}")

    except Exception as e:
        # Best-effort: report the failure (e.g. unsupported op) without
        # crashing the caller, matching the original behavior.
        print(f"转换过程中出现错误: {str(e)}")
