glenn-jocher committed
Commit 0c13240 · unverified · 1 Parent(s): 50ff6ee

Fix ONNX `--dynamic` export on GPU (#8378)

* Fix ONNX `--dynamic` export on GPU

The patch forces the model and image to CPU for `--dynamic` exports, since dynamic-axis ONNX export is only compatible with CPU. Resolves the bug raised in https://github.com/ultralytics/yolov5/issues/8377. A minimal standalone sketch of the idea is shown below.

* Update export.py

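The sketch below illustrates the pattern the patch adopts: when dynamic axes are requested, both the model and the example input are moved to CPU before calling `torch.onnx.export`. The `TinyModel` class, output file name, and tensor shapes here are illustrative assumptions, not part of export.py.

```python
# Minimal sketch, not export.py itself: force model and input to CPU for a
# dynamic-axes ONNX export. TinyModel and the shapes below are placeholders.
import torch
import torch.nn as nn

class TinyModel(nn.Module):
    def __init__(self):
        super().__init__()
        self.conv = nn.Conv2d(3, 8, 3, padding=1)

    def forward(self, x):
        return self.conv(x)

device = 'cuda' if torch.cuda.is_available() else 'cpu'
model = TinyModel().to(device).eval()
im = torch.zeros(1, 3, 64, 64, device=device)  # dummy BCHW input
dynamic = True  # equivalent of passing --dynamic

torch.onnx.export(
    model.cpu() if dynamic else model,  # --dynamic only compatible with cpu
    im.cpu() if dynamic else im,
    'tiny.onnx',
    opset_version=12,
    input_names=['images'],
    output_names=['output'],
    dynamic_axes={'images': {0: 'batch', 2: 'height', 3: 'width'},
                  'output': {0: 'batch'}} if dynamic else None,
)
```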
Files changed (1)
  1. export.py +4 -4
export.py CHANGED

@@ -119,8 +119,8 @@ def export_onnx(model, im, file, opset, train, dynamic, simplify, prefix=colorst
    f = file.with_suffix('.onnx')

    torch.onnx.export(
-       model,
-       im,
+       model.cpu() if dynamic else model,  # --dynamic only compatible with cpu
+       im.cpu() if dynamic else im,
        f,
        verbose=False,
        opset_version=opset,

@@ -499,8 +499,6 @@ def run(
    im = torch.zeros(batch_size, 3, *imgsz).to(device)  # image size(1,3,320,192) BCHW iDetection

    # Update model
-   if half and not coreml and not xml:
-       im, model = im.half(), model.half()  # to FP16
    model.train() if train else model.eval()  # training mode = no Detect() layer grid construction
    for k, m in model.named_modules():
        if isinstance(m, Detect):

@@ -510,6 +508,8 @@
    for _ in range(2):
        y = model(im)  # dry runs
+   if half and not coreml:
+       im, model = im.half(), model.half()  # to FP16
    shape = tuple(y[0].shape)  # model output shape
    LOGGER.info(f"\n{colorstr('PyTorch:')} starting from {file} with output shape {shape} ({file_size(file):.1f} MB)")
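Beyond the CPU fix, the run() hunks also drop `xml` from the half-precision condition and move the FP16 cast so it runs only after the two warm-up dry runs. A hedged sketch of that ordering, using a generic module and placeholder names rather than the real run() signature:

```python
# Illustrative only: the dry runs execute in FP32, and the cast to FP16
# happens afterwards, mirroring the reordering in this commit.
import torch
import torch.nn as nn

def warm_up_then_half(model: nn.Module, imgsz=(64, 64), batch_size=1,
                      device='cpu', half=False, coreml=False, train=False):
    im = torch.zeros(batch_size, 3, *imgsz).to(device)  # dummy BCHW image
    model.train() if train else model.eval()             # eval mode for export
    for _ in range(2):
        y = model(im)                                     # dry runs (still FP32)
    if half and not coreml:
        im, model = im.half(), model.half()               # cast to FP16 after the dry runs
    return model, im, tuple(y.shape)                      # output shape from the dry run

# Example usage with a throwaway model:
m = nn.Conv2d(3, 8, 3, padding=1)
m, im, shape = warm_up_then_half(m, half=False)
print(shape)
```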