glenn-jocher committed
Commit: d5966c9
1 Parent(s): ff8646c

Namespace `VERBOSE` env variable to `YOLOv5_VERBOSE` (#6428)

Files changed:
- hubconf.py +6 -6
- utils/general.py +27 -27
- utils/plots.py +3 -3
hubconf.py CHANGED

@@ -12,10 +12,10 @@ import torch
 
 
 def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbose=True, device=None):
-    """Creates a ...
+    """Creates or loads a YOLOv5 model
 
     Arguments:
-        name (str): name ...
+        name (str): model name 'yolov5s' or path 'path/to/best.pt'
         pretrained (bool): load pretrained weights into the model
         channels (int): number of input channels
         classes (int): number of model classes
@@ -24,19 +24,19 @@ def _create(name, pretrained=True, channels=3, classes=80, autoshape=True, verbo
         device (str, torch.device, None): device to use for model parameters
 
     Returns:
-        YOLOv5 ...
+        YOLOv5 model
     """
     from pathlib import Path
 
     from models.common import AutoShape, DetectMultiBackend
     from models.yolo import Model
     from utils.downloads import attempt_download
-    from utils.general import check_requirements, intersect_dicts, set_logging
+    from utils.general import LOGGER, check_requirements, intersect_dicts, logging
     from utils.torch_utils import select_device
 
+    if not verbose:
+        LOGGER.setLevel(logging.WARNING)
     check_requirements(exclude=('tensorboard', 'thop', 'opencv-python'))
-    set_logging(verbose=verbose)
-
     name = Path(name)
     path = name.with_suffix('.pt') if name.suffix == '' else name  # checkpoint path
     try:
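With this hubconf.py change, passing `verbose=False` through the Hub entry points no longer calls the removed `set_logging(verbose=verbose)` helper; it simply raises the shared `LOGGER` to WARNING. A minimal usage sketch, not part of the commit, assuming the standard `ultralytics/yolov5` Hub entry points and network access for the pretrained weights:

import torch

# verbose=False now triggers LOGGER.setLevel(logging.WARNING) inside _create(),
# suppressing the model-creation INFO messages.
model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True, verbose=False)

# The default verbose=True leaves the logger level untouched.
results = model('https://ultralytics.com/images/zidane.jpg')  # AutoShape inference on a URL image
results.print()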
utils/general.py CHANGED

@@ -36,7 +36,7 @@ from utils.metrics import box_iou, fitness
 FILE = Path(__file__).resolve()
 ROOT = FILE.parents[1]  # YOLOv5 root directory
 NUM_THREADS = min(8, max(1, os.cpu_count() - 1))  # number of YOLOv5 multiprocessing threads
-VERBOSE = str(os.getenv('VERBOSE', True)).lower() == 'true'  # global verbose mode
+VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true'  # global verbose mode
 
 torch.set_printoptions(linewidth=320, precision=5, profile='long')
 np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format})  # format short g, %precision=5
@@ -241,20 +241,20 @@ def check_online():
 def check_git_status():
     # Recommend 'git pull' if code is out of date
     msg = ', for updates see https://github.com/ultralytics/yolov5'
-    ...
-    assert Path('.git').exists(), 'skipping check (not a git repository)' + msg
-    assert not is_docker(), 'skipping check (Docker image)' + msg
-    assert check_online(), 'skipping check (offline)' + msg
+    s = colorstr('github: ')  # string
+    assert Path('.git').exists(), s + 'skipping check (not a git repository)' + msg
+    assert not is_docker(), s + 'skipping check (Docker image)' + msg
+    assert check_online(), s + 'skipping check (offline)' + msg
 
     cmd = 'git fetch && git config --get remote.origin.url'
     url = check_output(cmd, shell=True, timeout=5).decode().strip().rstrip('.git')  # git fetch
     branch = check_output('git rev-parse --abbrev-ref HEAD', shell=True).decode().strip()  # checked out
     n = int(check_output(f'git rev-list {branch}..origin/master --count', shell=True))  # commits behind
     if n > 0:
-        s ...
+        s += f"⚠️ YOLOv5 is out of date by {n} commit{'s' * (n > 1)}. Use `git pull` or `git clone {url}` to update."
     else:
-        s ...
-    ...
+        s += f'up to date with {url} ✅'
+    LOGGER.info(emojis(s))  # emoji-safe
 
 
 def check_python(minimum='3.6.2'):
@@ -294,21 +294,21 @@ def check_requirements(requirements=ROOT / 'requirements.txt', exclude=(), insta
         except Exception as e:  # DistributionNotFound or VersionConflict if requirements not met
             s = f"{prefix} {r} not found and is required by YOLOv5"
             if install:
-                ...
+                LOGGER.info(f"{s}, attempting auto-update...")
                 try:
                     assert check_online(), f"'pip install {r}' skipped (offline)"
-                    ...
+                    LOGGER.info(check_output(f"pip install '{r}'", shell=True).decode())
                     n += 1
                 except Exception as e:
-                    ...
+                    LOGGER.warning(f'{prefix} {e}')
             else:
-                ...
+                LOGGER.info(f'{s}. Please install and rerun your command.')
 
     if n:  # if packages updated
         source = file.resolve() if 'file' in locals() else requirements
         s = f"{prefix} {n} package{'s' * (n > 1)} updated per {source}\n" \
             f"{prefix} ⚠️ {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n"
-        ...
+        LOGGER.info(emojis(s))
 
 
 def check_img_size(imgsz, s=32, floor=0):
@@ -318,7 +318,7 @@ def check_img_size(imgsz, s=32, floor=0):
     else:  # list i.e. img_size=[640, 480]
         new_size = [max(make_divisible(x, int(s)), floor) for x in imgsz]
     if new_size != imgsz:
-        ...
+        LOGGER.warning(f'WARNING: --img-size {imgsz} must be multiple of max stride {s}, updating to {new_size}')
     return new_size
 
 
@@ -333,7 +333,7 @@ def check_imshow():
         cv2.waitKey(1)
         return True
     except Exception as e:
-        ...
+        LOGGER.warning(f'WARNING: Environment does not support cv2.imshow() or PIL Image.show() image displays\n{e}')
         return False
 
 
@@ -363,9 +363,9 @@ def check_file(file, suffix=''):
         url = str(Path(file)).replace(':/', '://')  # Pathlib turns :// -> :/
         file = Path(urllib.parse.unquote(file).split('?')[0]).name  # '%2F' to '/', split https://url.com/file.txt?auth
         if Path(file).is_file():
-            ...
+            LOGGER.info(f'Found {url} locally at {file}')  # file already exists
         else:
-            ...
+            LOGGER.info(f'Downloading {url} to {file}...')
             torch.hub.download_url_to_file(url, file)
         assert Path(file).exists() and Path(file).stat().st_size > 0, f'File download failed: {url}'  # check
         return file
@@ -407,23 +407,23 @@ def check_dataset(data, autodownload=True):
     if val:
         val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])]  # val path
         if not all(x.exists() for x in val):
-            ...
+            LOGGER.info('\nDataset not found, missing paths: %s' % [str(x) for x in val if not x.exists()])
             if s and autodownload:  # download script
                 root = path.parent if 'path' in data else '..'  # unzip directory i.e. '../'
                 if s.startswith('http') and s.endswith('.zip'):  # URL
                     f = Path(s).name  # filename
-                    ...
+                    LOGGER.info(f'Downloading {s} to {f}...')
                     torch.hub.download_url_to_file(s, f)
                     Path(root).mkdir(parents=True, exist_ok=True)  # create root
                     ZipFile(f).extractall(path=root)  # unzip
                     Path(f).unlink()  # remove zip
                     r = None  # success
                 elif s.startswith('bash '):  # bash script
-                    ...
+                    LOGGER.info(f'Running {s} ...')
                     r = os.system(s)
                 else:  # python script
                     r = exec(s, {'yaml': data})  # return None
-                ...
+                LOGGER.info(f"Dataset autodownload {f'success, saved to {root}' if r in (0, None) else 'failure'}\n")
             else:
                 raise Exception('Dataset not found.')
 
@@ -445,13 +445,13 @@ def download(url, dir='.', unzip=True, delete=True, curl=False, threads=1):
         if Path(url).is_file():  # exists in current path
             Path(url).rename(f)  # move to dir
         elif not f.exists():
-            ...
+            LOGGER.info(f'Downloading {url} to {f}...')
             if curl:
                 os.system(f"curl -L '{url}' -o '{f}' --retry 9 -C -")  # curl download, retry and resume on fail
             else:
                 torch.hub.download_url_to_file(url, f, progress=True)  # torch download
         if unzip and f.suffix in ('.zip', '.gz'):
-            ...
+            LOGGER.info(f'Unzipping {f}...')
             if f.suffix == '.zip':
                 ZipFile(f).extractall(path=dir)  # unzip
             elif f.suffix == '.gz':
@@ -744,7 +744,7 @@ def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=Non
 
         output[xi] = x[i]
         if (time.time() - t) > time_limit:
-            ...
+            LOGGER.warning(f'WARNING: NMS time limit {time_limit}s exceeded')
             break  # time limit exceeded
 
     return output
@@ -763,7 +763,7 @@ def strip_optimizer(f='best.pt', s=''):  # from utils.general import *; strip_op
         p.requires_grad = False
     torch.save(x, s or f)
     mb = os.path.getsize(s or f) / 1E6  # filesize
-    ...
+    LOGGER.info(f"Optimizer stripped from {f},{(' saved as %s,' % s) if s else ''} {mb:.1f}MB")
 
 
 def print_mutation(results, hyp, save_dir, bucket):
@@ -786,8 +786,8 @@ def print_mutation(results, hyp, save_dir, bucket):
         f.write(s + ('%20.5g,' * n % vals).rstrip(',') + '\n')
 
     # Print to screen
-    ...
-    ...
+    LOGGER.info(colorstr('evolve: ') + ', '.join(f'{x.strip():>20s}' for x in keys))
+    LOGGER.info(colorstr('evolve: ') + ', '.join(f'{x:20.5g}' for x in vals) + '\n\n')
 
     # Save yaml
     with open(evolve_yaml, 'w') as f:
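The renamed variable is evaluated once, at import time of utils/general.py, so callers have to set it before any YOLOv5 module is imported. A hedged sketch of the intended use after this commit; only the `YOLOv5_VERBOSE` name and its default come from the diff above, the rest is illustrative and assumes the logging level is still derived from VERBOSE, as the 'global verbose mode' comment suggests:

import os

# Set before importing YOLOv5 code: utils/general.py reads the variable at import time via
# VERBOSE = str(os.getenv('YOLOv5_VERBOSE', True)).lower() == 'true'
os.environ['YOLOv5_VERBOSE'] = 'false'  # before this commit the variable was named 'VERBOSE'

import torch
model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # INFO-level output should now be suppressed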
utils/plots.py CHANGED

@@ -57,7 +57,7 @@ def check_font(font='Arial.ttf', size=10):
         return ImageFont.truetype(str(font) if font.exists() else font.name, size)
     except Exception as e:  # download if missing
         url = "https://ultralytics.com/assets/" + font.name
-        ...
+        LOGGER.info(f'Downloading {url} to {font}...')
         torch.hub.download_url_to_file(url, str(font), progress=False)
         try:
             return ImageFont.truetype(str(font), size)
@@ -143,7 +143,7 @@ def feature_visualization(x, module_type, stage, n=32, save_dir=Path('runs/detec
             ax[i].imshow(blocks[i].squeeze())  # cmap='gray'
             ax[i].axis('off')
 
-        ...
+        LOGGER.info(f'Saving {f}... ({n}/{channels})')
         plt.savefig(f, dpi=300, bbox_inches='tight')
         plt.close()
         np.save(str(f.with_suffix('.npy')), x[0].cpu().numpy())  # npy save
@@ -417,7 +417,7 @@ def plot_results(file='path/to/results.csv', dir=''):
                 # if j in [8, 9, 10]:  # share train and val loss y axes
                 #     ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
         except Exception as e:
-            ...
+            LOGGER.info(f'Warning: Plotting error for {f}: {e}')
     ax[1].legend()
     fig.savefig(save_dir / 'results.png', dpi=200)
     plt.close()
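Since these hunks send their messages through the shared LOGGER rather than stdout, callers can filter or redirect YOLOv5 output with the standard logging machinery. A minimal sketch, not from the commit, relying only on LOGGER being the logging.Logger instance importable from utils.general (as shown in the hubconf.py hunk above):

import logging

from utils.general import LOGGER  # the same logger the LOGGER.info()/LOGGER.warning() calls above write to

LOGGER.setLevel(logging.WARNING)  # hide the 'Downloading...' / 'Saving...' INFO messages

file_handler = logging.FileHandler('yolov5.log')  # optionally mirror messages to a file as well
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
LOGGER.addHandler(file_handler)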