import torch, os
from torch.distributed import init_process_group, destroy_process_group


def ddp_setup(rank: int, world_size: int) -> None:
    """Initialize a NCCL process group for manually-spawned DDP workers.

    Intended for launch via ``torch.multiprocessing.spawn`` (each worker
    passes its own ``rank``), not for ``torchrun`` — see
    ``ddp_setup_torchrun`` for that path.

    Args:
        rank: This process's rank, also used as its local CUDA device index
            (assumes one process per GPU on a single node).
        world_size: Total number of participating processes.
    """
    # Respect an externally-provided rendezvous address/port; only fall back
    # to the localhost defaults when the launcher did not set them.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "1235")
    print("Rank ##########################################################",rank)
    init_process_group(backend="nccl", rank=rank, world_size=world_size)
    # Bind this process to its GPU so collectives use the right device.
    torch.cuda.set_device(rank)

def ddp_setup_torchrun(world_size: int) -> int:
    """Initialize a NCCL process group for workers launched by ``torchrun``.

    ``torchrun`` provides the rendezvous (``MASTER_ADDR``/``MASTER_PORT``)
    and per-process env vars, so no address setup is needed here.

    Args:
        world_size: Total number of participating processes.

    Returns:
        This process's local rank (its CUDA device index on this node).
    """
    local_rank = int(os.environ["LOCAL_RANK"])
    # The process-group rank must be the GLOBAL rank. Using LOCAL_RANK here
    # would make ranks collide across nodes in multi-node runs. Fall back to
    # local_rank when RANK is absent (single-node case, identical value).
    rank = int(os.environ.get("RANK", local_rank))
    init_process_group(backend="nccl", rank=rank, world_size=world_size)
    # The GPU index, by contrast, is per-node: use the local rank.
    torch.cuda.set_device(local_rank)
    print(os.environ["MASTER_ADDR"],os.environ["MASTER_PORT"])
    print("Rank distributed ###############################################",rank)
    return local_rank
