glenn-jocher committed
Commit 8b5086c • Parent(s): 63157d2
Multi-GPU default to single device 0 (#3554)
* Multi-GPU default to single device 0
* add space
- utils/torch_utils.py +2 -2
utils/torch_utils.py
CHANGED
@@ -72,11 +72,11 @@ def select_device(device='', batch_size=None):
 
     cuda = not cpu and torch.cuda.is_available()
     if cuda:
-        devices = device.split(',') if device else range(torch.cuda.device_count())  # i.e. 0,1,6,7
+        devices = device.split(',') if device else '0'  # range(torch.cuda.device_count())  # i.e. 0,1,6,7
         n = len(devices)  # device count
         if n > 1 and batch_size:  # check batch_size is divisible by device_count
             assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}'
-        space = ' ' * len(s)
+        space = ' ' * (len(s) + 1)
         for i, d in enumerate(devices):
             p = torch.cuda.get_device_properties(i)
             s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / 1024 ** 2}MB)\n"  # bytes to MB
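For reference, below is a minimal, self-contained sketch of the patched selection logic. It is not the full ultralytics select_device implementation (which also builds a YOLOv5 version prefix and sets CUDA_VISIBLE_DEVICES); the log prefix s, the CPU branch, and the return value are assumed here for illustration only.

import torch

def select_device(device='', batch_size=None):
    # Sketch of the patched logic: with no explicit device string, default to the
    # single GPU '0' instead of all GPUs (range(torch.cuda.device_count())).
    s = f'torch {torch.__version__} '  # log prefix (assumed; the real code prepends the YOLOv5 version)
    cpu = device.lower() == 'cpu'
    cuda = not cpu and torch.cuda.is_available()
    if cuda:
        devices = device.split(',') if device else '0'  # e.g. '0' or ['0', '1', '6', '7']
        n = len(devices)  # device count (the string '0' has length 1)
        if n > 1 and batch_size:  # batch size must split evenly across the requested GPUs
            assert batch_size % n == 0, f'batch-size {batch_size} not multiple of GPU count {n}'
        space = ' ' * (len(s) + 1)  # indent so each additional GPU line aligns under the first
        for i, d in enumerate(devices):
            p = torch.cuda.get_device_properties(i)
            s += f"{'' if i == 0 else space}CUDA:{d} ({p.name}, {p.total_memory / 1024 ** 2}MB)\n"  # bytes to MB
    else:
        s += 'CPU\n'
    print(s)
    return torch.device('cuda:0' if cuda else 'cpu')

The practical effect of the change is that running without --device on a multi-GPU machine now selects and reports only CUDA:0; using several GPUs still works but requires an explicit --device 0,1,... argument.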