{
    "device": "$torch.device(f'cuda:{dist.get_rank()}')",
    "gnetwork": {
        "_target_": "torch.nn.parallel.DistributedDataParallel",
        "module": "$@autoencoder_def.to(@device)",
        "device_ids": [
            "@device"
        ]
    },
    "dnetwork": {
        "_target_": "torch.nn.parallel.DistributedDataParallel",
        "module": "$@discriminator_def.to(@device)",
        "device_ids": [
            "@device"
        ]
    },
    "train#sampler": {
        "_target_": "DistributedSampler",
        "dataset": "@train#dataset",
        "even_divisible": true,
        "shuffle": true
    },
    "train#dataloader#sampler": "@train#sampler",
    "train#dataloader#shuffle": false,
    "train#trainer#train_handlers": "$@train#handlers[: -2 if dist.get_rank() > 0 else None]",
    "initialize": [
        "$import torch.distributed as dist",
        "$dist.is_initialized() or dist.init_process_group(backend='nccl')",
        "$torch.cuda.set_device(@device)",
        "$monai.utils.set_determinism(seed=123)",
        "$import logging",
        "$@train#trainer.logger.setLevel(logging.WARNING if dist.get_rank() > 0 else logging.INFO)"
    ],
    "run": [
        "$@train#trainer.run()"
    ],
    "finalize": [
        "$dist.is_initialized() and dist.destroy_process_group()"
    ]
}