import torch


def torch_distributed_get_info(group: torch.distributed.ProcessGroup | None = None):
    try:
        process_rank = torch.distributed.get_rank(group)
        processes_num = torch.distributed.get_world_size(group)
    except ValueError:
        process_rank = 0
        processes_num = 1
    return process_rank, processes_num


def torch_distributed_is_rank0(group: torch.distributed.ProcessGroup | None = None):
    """Return ``True`` iff the calling process has rank 0 in ``group``."""
    rank, _world_size = torch_distributed_get_info(group)
    return rank == 0
