import torch

def print_rank_0(message):
    """Print `message` only from rank 0 when torch.distributed is initialized; otherwise print it unconditionally."""
    if torch.distributed.is_initialized():
        if torch.distributed.get_rank() == 0:
            print(message, flush=True)
    else:
        print(message, flush=True)
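

# Minimal usage sketch (not part of the original file). The backend, address,
# port, rank, and world size below are illustrative assumptions for a
# single-process run, not values taken from the source.
if __name__ == "__main__":
    # Before any process group exists, print_rank_0 falls back to a plain print.
    print_rank_0("not initialized: every process would print this")

    # With a process group initialized, only rank 0 prints.
    torch.distributed.init_process_group(
        backend="gloo",                          # CPU-friendly backend (assumption)
        init_method="tcp://127.0.0.1:29500",     # arbitrary local rendezvous address
        rank=0,
        world_size=1,
    )
    print_rank_0("initialized: only rank 0 prints this")
    torch.distributed.destroy_process_group()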