glenn-jocher committed on
Commit
0032af2
1 Parent(s): 1d17b9a

Update utils.py strip_optimizer() (#509)

Browse files
Files changed (2) hide show
  1. Dockerfile +1 -1
  2. utils/utils.py +6 -14
Dockerfile CHANGED
@@ -43,7 +43,7 @@ COPY . /usr/src/app
43
  # sudo docker commit 092b16b25c5b usr/resume && sudo docker run -it --gpus all --ipc=host -v "$(pwd)"/coco:/usr/src/coco --entrypoint=sh usr/resume
44
 
45
  # Send weights to GCP
46
- # python -c "from utils.utils import *; create_pretrained('path/last.pt')" && gsutil cp weights/pretrained.pt gs://*
47
 
48
  # Clean up
49
  # docker system prune -a --volumes
 
43
  # sudo docker commit 092b16b25c5b usr/resume && sudo docker run -it --gpus all --ipc=host -v "$(pwd)"/coco:/usr/src/coco --entrypoint=sh usr/resume
44
 
45
  # Send weights to GCP
46
+ # python -c "from utils.utils import *; strip_optimizer('runs/exp0/weights/last.pt', 'temp.pt')" && gsutil cp temp.pt gs://*
47
 
48
  # Clean up
49
  # docker system prune -a --volumes
utils/utils.py CHANGED
@@ -645,26 +645,18 @@ def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, merge=False,
645
  return output
646
 
647
 
648
- def strip_optimizer(f='weights/best.pt'): # from utils.utils import *; strip_optimizer()
649
- # Strip optimizer from *.pt files for lighter files (reduced by 1/2 size)
650
- x = torch.load(f, map_location=torch.device('cpu'))
651
- x['optimizer'] = None
652
- x['model'].half() # to FP16
653
- torch.save(x, f)
654
- print('Optimizer stripped from %s, %.1fMB' % (f, os.path.getsize(f) / 1E6))
655
-
656
-
657
- def create_pretrained(f='weights/best.pt', s='weights/pretrained.pt'): # from utils.utils import *; create_pretrained()
658
- # create pretrained checkpoint 's' from 'f' (create_pretrained(x, x) for x in glob.glob('./*.pt'))
659
  x = torch.load(f, map_location=torch.device('cpu'))
660
  x['optimizer'] = None
661
  x['training_results'] = None
662
  x['epoch'] = -1
663
  x['model'].half() # to FP16
664
  for p in x['model'].parameters():
665
- p.requires_grad = True
666
- torch.save(x, s)
667
- print('%s saved as pretrained checkpoint %s, %.1fMB' % (f, s, os.path.getsize(s) / 1E6))
 
668
 
669
 
670
  def coco_class_count(path='../coco/labels/train2014/'):
 
645
  return output
646
 
647
 
648
def strip_optimizer(f='weights/best.pt', s=''):  # from utils.utils import *; strip_optimizer()
    """Strip optimizer state from checkpoint 'f' to finalize training.

    Removes the optimizer state, training results and epoch counter,
    converts the model to FP16 and freezes its parameters, roughly
    halving the file size. Saves in place, or to 's' if given (e.g. to
    upload a slim copy while keeping the original).

    Arguments:
        f (str): path of the checkpoint (*.pt) to strip.
        s (str): optional output path; if empty, 'f' is overwritten.
    """
    # NOTE(security): torch.load unpickles arbitrary objects -- only use on
    # trusted checkpoints. weights_only=False keeps the original behavior
    # (the checkpoint stores the full model object, not just tensors) on
    # torch >= 2.6, where the default flipped to True and would fail here.
    x = torch.load(f, map_location=torch.device('cpu'), weights_only=False)
    x['optimizer'] = None
    x['training_results'] = None
    x['epoch'] = -1
    x['model'].half()  # to FP16 (halves checkpoint size)
    for p in x['model'].parameters():
        p.requires_grad = False  # freeze: stripped checkpoints are for inference / pretraining
    torch.save(x, s or f)  # overwrite f unless a separate target s was given
    mb = os.path.getsize(s or f) / 1E6  # resulting filesize in MB
    print('Optimizer stripped from %s,%s %.1fMB' % (f, (' saved as %s,' % s) if s else '', mb))
660
 
661
 
662
  def coco_class_count(path='../coco/labels/train2014/'):