# https://pytorch.org/docs/stable/notes/ddp.html

import os
import torch
import torch.distributed as dist
from multiprocessing import Semaphore
import torch.multiprocessing as mp
import torch.nn as nn
import torch.optim as optim
from torch.nn.parallel import DistributedDataParallel as DDP
from PyCmpltrtok.common import sep


def example(rank, world_size, sema):
    """Run one DDP worker: one forward/backward/step on a tiny linear model.

    Args:
        rank: this process's rank; also used as the CUDA device index
            via ``.to(rank)``.
        world_size: total number of spawned worker processes.
        sema: ``multiprocessing.Semaphore`` used to serialize console
            output across workers.
    """
    flag = f'{rank}/{world_size}'
    with sema:
        sep(f'{flag} start')
    # create default process group (NCCL backend: one GPU per rank)
    # dist.init_process_group("gloo", rank=rank, world_size=world_size)
    dist.init_process_group("nccl", rank=rank, world_size=world_size)
    try:
        # create local model on this rank's device
        model = nn.Linear(10, 10).to(rank)
        # construct DDP model
        # ddp_model = DDP(model, device_ids=[rank])
        ddp_model = DDP(model, device_ids=None)
        # define loss function and optimizer
        loss_fn = nn.MSELoss()
        optimizer = optim.SGD(ddp_model.parameters(), lr=0.001)

        # forward pass
        outputs = ddp_model(torch.randn(20, 10).to(rank))
        with sema:
            print(flag, '20x10 ->', outputs.shape)
        labels = torch.randn(20, 10).to(rank)
        # backward pass (DDP synchronizes gradients across ranks here)
        loss_fn(outputs, labels).backward()
        # update parameters
        optimizer.step()
        with sema:
            sep(f'{flag} end')
    finally:
        # Fix: the process group was never torn down. The DDP docs require
        # destroy_process_group() so NCCL communicator resources are
        # released cleanly even if the training body raises.
        dist.destroy_process_group()


def main(sema, world_size=2):
    """Spawn ``world_size`` DDP worker processes and wait for them to finish.

    Args:
        sema: semaphore forwarded to every worker for serialized printing.
        world_size: number of worker processes to launch. Generalized from
            the previously hard-coded value; defaults to 2 for backward
            compatibility.
    """
    with sema:
        sep('spawn started (joined)')
    mp.spawn(
        example,
        args=(world_size, sema, ),
        nprocs=world_size,
        # join=False,  # FileNotFoundError: [Errno 2] No such file or directory
        join=True,  # torch.multiprocessing.spawn.ProcessExitedException: process 0 terminated with signal SIGSEGV
    )
    with sema:
        sep('spawn over')


if __name__=="__main__":
    # c10d's default "env://" rendezvous reads MASTER_ADDR and MASTER_PORT
    # from the environment; leaving them unset makes init_process_group fail:
    #   ValueError: Error initializing torch.distributed using env:// rendezvous:
    #       environment variable MASTER_ADDR expected, but not set
    #   ValueError: Error initializing torch.distributed using env:// rendezvous:
    #       environment variable MASTER_PORT expected, but not set
    os.environ["MASTER_ADDR"] = "localhost"
    os.environ["MASTER_PORT"] = "29500"

    # Binary semaphore shared with all workers to keep printed output readable.
    print_gate = Semaphore(1)
    main(print_gate)
