glenn-jocher committed
Commit 0822cda • 1 parent: 89c7a5b
Update caching (#1496)
Files changed:
- train.py +1 -1
- utils/datasets.py +1 -1
train.py CHANGED
@@ -412,7 +412,7 @@ if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument('--weights', type=str, default='yolov5s.pt', help='initial weights path')
     parser.add_argument('--cfg', type=str, default='', help='model.yaml path')
-    parser.add_argument('--data', type=str, default='data/
+    parser.add_argument('--data', type=str, default='data/voc.yaml', help='data.yaml path')
     parser.add_argument('--hyp', type=str, default='data/hyp.scratch.yaml', help='hyperparameters path')
     parser.add_argument('--epochs', type=int, default=300)
     parser.add_argument('--batch-size', type=int, default=16, help='total batch size for all GPUs')
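
For context, the train.py hunk only changes the fallback value that argparse uses when --data is not given on the command line; an explicit --data flag still overrides it. A minimal sketch of that behaviour (the standalone parser and the example override path are illustrative, not part of the commit):

import argparse

# Rebuild just the edited option to show how the new default is applied.
parser = argparse.ArgumentParser()
parser.add_argument('--data', type=str, default='data/voc.yaml', help='data.yaml path')

print(parser.parse_args([]).data)                        # 'data/voc.yaml' (default)
print(parser.parse_args(['--data', 'other.yaml']).data)  # 'other.yaml' (flag overrides default)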
utils/datasets.py CHANGED
@@ -365,7 +365,7 @@ class LoadImagesAndLabels(Dataset):  # for training/testing
         cache_path = Path(self.label_files[0]).parent.with_suffix('.cache')  # cached labels
         if cache_path.is_file():
             cache = torch.load(cache_path)  # load
-            if cache['hash'] != get_hash(self.label_files + self.img_files):  # dataset changed
+            if cache['hash'] != get_hash(self.label_files + self.img_files + [cache_path]):  # dataset changed
                 cache = self.cache_labels(cache_path)  # re-cache
         else:
             cache = self.cache_labels(cache_path)  # cache
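
The utils/datasets.py hunk widens what feeds the label-cache key: the stored hash is now compared against a hash of the label files, the image files, and the .cache file itself, and a mismatch triggers a re-cache. The commit does not show get_hash, so the snippet below is only a minimal sketch of the general pattern, assuming a size-based hash; the helper names are hypothetical and this is not the repository's implementation.

import os
from pathlib import Path

import torch


def file_list_hash(paths):
    # Hypothetical stand-in for get_hash: sum the sizes of all existing files so
    # that adding, removing, or resizing any of them changes the key.
    return sum(os.path.getsize(p) for p in paths if os.path.isfile(p))


def load_or_build_label_cache(label_files, img_files, build_cache):
    # Mirrors the caching pattern in the hunk: reuse the .cache file only while the
    # stored hash matches the current dataset state, otherwise rebuild it.
    cache_path = Path(label_files[0]).parent.with_suffix('.cache')
    if cache_path.is_file():
        cache = torch.load(cache_path)  # load existing cache
        if cache['hash'] != file_list_hash(label_files + img_files + [cache_path]):
            cache = build_cache(cache_path)  # dataset changed -> re-cache
    else:
        cache = build_cache(cache_path)  # no cache yet -> build it
    return cache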