glenn-jocher committed on
Commit dc54ed5
1 Parent(s): b8a4bab

`--freeze` fix (#6044)


Fix for https://github.com/ultralytics/yolov5/issues/6038
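Judging from the diff below (the linked issue is the authoritative report): --freeze is declared with nargs='+', so argparse always hands train() a list, e.g. [10] for --freeze 10. The old comprehension took its list branch for that input and built only ['model.10.'], freezing a single layer instead of the first ten; the non-list branch only ever saw the old int default of 0. The fix makes the default a list ([0]) and treats a one-element list as a layer count and a longer list as explicit layer indices. A minimal sketch of the two behaviours; old_names and new_names are illustrative helpers, not functions in train.py:

    # Hypothetical helpers that just wrap the two comprehensions from the diff.
    def old_names(freeze):  # pre-fix comprehension (default was the int 0)
        return [f'model.{x}.' for x in (freeze if isinstance(freeze, list) else range(freeze))]

    def new_names(freeze):  # post-fix comprehension (default is the list [0])
        return [f'model.{x}.' for x in (freeze if len(freeze) > 1 else range(freeze[0]))]

    print(old_names([10]))       # ['model.10.']                 -> only layer 10 frozen (the bug)
    print(new_names([10]))       # ['model.0.', ..., 'model.9.'] -> first 10 layers frozen
    print(new_names([0, 1, 2]))  # ['model.0.', 'model.1.', 'model.2.'] -> explicit indices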

Files changed (1)
  1. train.py +3 -3
train.py CHANGED
@@ -60,7 +60,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
           device,
           callbacks
           ):
-    save_dir, epochs, batch_size, weights, single_cls, evolve, data, cfg, resume, noval, nosave, workers, freeze, = \
+    save_dir, epochs, batch_size, weights, single_cls, evolve, data, cfg, resume, noval, nosave, workers, freeze = \
         Path(opt.save_dir), opt.epochs, opt.batch_size, opt.weights, opt.single_cls, opt.evolve, opt.data, opt.cfg, \
         opt.resume, opt.noval, opt.nosave, opt.workers, opt.freeze
 
@@ -124,7 +124,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
     model = Model(cfg, ch=3, nc=nc, anchors=hyp.get('anchors')).to(device)  # create
 
     # Freeze
-    freeze = [f'model.{x}.' for x in (freeze if isinstance(freeze, list) else range(freeze))]  # layers to freeze
+    freeze = [f'model.{x}.' for x in (freeze if len(freeze) > 1 else range(freeze[0]))]  # layers to freeze
     for k, v in model.named_parameters():
         v.requires_grad = True  # train all layers
         if any(x in k for x in freeze):
 
@@ -469,7 +469,7 @@ def parse_opt(known=False):
     parser.add_argument('--linear-lr', action='store_true', help='linear LR')
     parser.add_argument('--label-smoothing', type=float, default=0.0, help='Label smoothing epsilon')
     parser.add_argument('--patience', type=int, default=100, help='EarlyStopping patience (epochs without improvement)')
-    parser.add_argument('--freeze', nargs='+', type=int, default=0, help='Freeze layers: backbone=10, first3=0 1 2')
+    parser.add_argument('--freeze', nargs='+', type=int, default=[0], help='Freeze layers: backbone=10, first3=0 1 2')
     parser.add_argument('--save-period', type=int, default=-1, help='Save checkpoint every x epochs (disabled if < 1)')
     parser.add_argument('--local_rank', type=int, default=-1, help='DDP parameter, do not modify')
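For context, a minimal standalone sketch of how the fixed freeze loop behaves end to end. The Toy module and net below are hypothetical stand-ins that only mimic YOLOv5's 'model.<i>.' parameter naming; they are not part of this commit or of train.py:

    import torch.nn as nn

    class Toy(nn.Module):
        # Toy stand-in whose parameters are named 'model.0.weight', 'model.1.weight', ...
        def __init__(self):
            super().__init__()
            self.model = nn.Sequential(*(nn.Linear(4, 4) for _ in range(12)))

    net = Toy()
    freeze = [10]  # what parse_opt() yields for --freeze 10 (freeze the first 10 layers)
    freeze = [f'model.{x}.' for x in (freeze if len(freeze) > 1 else range(freeze[0]))]

    for k, v in net.named_parameters():
        v.requires_grad = True             # train all layers by default
        if any(x in k for x in freeze):    # names matching model.0. ... model.9. get frozen
            print(f'freezing {k}')
            v.requires_grad = False

Layers model.10 and model.11 of the toy model stay trainable, matching the 'backbone=10' hint in the --freeze help string.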