glenn-jocher committed on
Commit
950a85d
1 Parent(s): c16671f

TensorRT PyTorch Hub inference fix (#7560)

Browse files

Solution proposed in https://github.com/ultralytics/yolov5/issues/7128 to TRT PyTorch Hub CUDA illegal memory errors.

Files changed (1) hide show
  1. models/common.py +1 -1
models/common.py CHANGED
@@ -531,7 +531,7 @@ class AutoShape(nn.Module):
531
  # multiple: = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] # list of images
532
 
533
  t = [time_sync()]
534
- p = next(self.model.parameters()) if self.pt else torch.zeros(1) # for device and type
535
  autocast = self.amp and (p.device.type != 'cpu') # Automatic Mixed Precision (AMP) inference
536
  if isinstance(imgs, torch.Tensor): # torch
537
  with amp.autocast(autocast):
 
531
  # multiple: = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] # list of images
532
 
533
  t = [time_sync()]
534
+ p = next(self.model.parameters()) if self.pt else torch.zeros(1, device=self.model.device) # for device, type
535
  autocast = self.amp and (p.device.type != 'cpu') # Automatic Mixed Precision (AMP) inference
536
  if isinstance(imgs, torch.Tensor): # torch
537
  with amp.autocast(autocast):