import cv2
import onnx
import onnxruntime
import numpy as np

import torch
import segmentation_models_pytorch as smp

# Build a DeepLabV3Plus segmentation model with a ResNet-34 encoder,
# ImageNet-pretrained encoder weights, and a single output channel,
# and put it in eval mode so export traces inference behavior
# (e.g. batch-norm uses running stats).
model = smp.DeepLabV3Plus("resnet34", encoder_weights="imagenet", classes=1)
model = model.eval()

# dynamic_axes marks which tensor axes may vary at inference time.
# Batch size, height, and width are all dynamic here; only the channel
# axis (1) stays fixed.
dynamic_axes = {0: "batch_size", 2: "height", 3: "width"}

onnx_model_name = "DeepLabV3Plus_resnet34.onnx"

# NOTE: torch.onnx.export (TorchScript exporter) writes the model to disk
# and returns None. The original code bound its result to `onnx_model`,
# which was misleading (and immediately shadowed by onnx.load below) —
# do not assign it.
torch.onnx.export(
    model,  # model being run
    torch.randn(1, 3, 224, 224),  # dummy input used to trace the graph
    onnx_model_name,  # where to save the model (file path or file-like object)
    export_params=True,  # store the trained parameter weights inside the model file
    opset_version=12,  # the ONNX opset version to export with
    do_constant_folding=True,  # fold constant subexpressions for optimization
    input_names=["input"],  # the model's input names
    output_names=["output"],  # the model's output names
    dynamic_axes={  # variable length axes
        "input": dynamic_axes,
        "output": dynamic_axes,
    },
)


# Reload the exported model and validate its graph structure.
onnx_model = onnx.load(onnx_model_name)
onnx.checker.check_model(onnx_model)

# Load a sample image whose size (and thus height/width) differs from the
# 1x3x224x224 dummy used at export time, exercising the dynamic axes.
# cv2.imread returns None (no exception) when the file is missing or
# unreadable, so fail fast with a clear error instead of a cryptic one later.
img = cv2.imread("1.jpg")
if img is None:
    raise FileNotFoundError("could not read image '1.jpg'")

# cv2 loads images as HWC (BGR channel order). Add a leading batch axis,
# convert to float32, then *transpose* to NCHW. The original code used
# np.reshape for this step, which is a bug: reshape reinterprets the flat
# buffer without permuting axes, so the pixel layout was scrambled (the
# comparison below still passed because both frameworks received the same
# scrambled tensor).
img = np.expand_dims(img, axis=0).astype(np.float32)
img = np.transpose(img, (0, 3, 1, 2))  # NHWC -> NCHW

ort_session = onnxruntime.InferenceSession(
    onnx_model_name, providers=["CPUExecutionProvider"]
)

# Compute the ONNX Runtime output prediction.
ort_inputs = {"input": img}
ort_outputs = ort_session.run(output_names=None, input_feed=ort_inputs)

# Compute the PyTorch output prediction on the same input.
with torch.no_grad():
    torch_out = model(torch.from_numpy(img))

# Compare ONNX Runtime and PyTorch results; raises AssertionError on mismatch.
np.testing.assert_allclose(torch_out.numpy(), ort_outputs[0], rtol=1e-03, atol=1e-05)

print("Exported model has been tested with ONNXRuntime, and the result looks good!")