#! /usr/bin/env python

from __future__ import absolute_import, division, print_function, unicode_literals
import os
import torch
import torch.distributed as dist
import time


def env2int(env_list, default=-1):
    """Return the first non-negative integer found among the given env vars.

    Each name in *env_list* is looked up in ``os.environ`` and parsed as an
    int (missing vars count as -1); the first value >= 0 wins.  If none of
    the variables yields a non-negative value, *default* is returned.
    """
    candidates = (int(os.environ.get(name, -1)) for name in env_list)
    return next((value for value in candidates if value >= 0), default)


def sample_allred(rank, devRank, size):
    """Smoke-test all_reduce: sum a ones-tensor across every rank.

    Requires an already-initialized process group (see init_process) and a
    CUDA device at index devRank.  With a sum all_reduce over world-size
    ranks, `after` is expected to equal the world size while `before` is 1.0.

    NOTE(review): `rank` and `size` are accepted to match the fn(rank,
    devRank, size) callback signature but are not used here.  `devRank` is
    presumably the local (per-node) GPU index — confirm against the caller,
    which passes my_local_rank.
    """
    curDev = torch.device("cuda", devRank)
    A = torch.ones([100], device=curDev)
    before = A[0].item()
    dist.all_reduce(A)  # default op is SUM across all ranks in the group
    after = A[0].item()
    print("\t before: %s after: %s " % (before, after))


def init_process(rank, devRank, host, master_ip, size, fn, backend='nccl',
                 master_port='29500'):
    """Initialize the distributed environment, then run *fn*.

    Sets MASTER_ADDR/MASTER_PORT in the environment (the rendezvous
    mechanism torch.distributed reads when no init_method is given),
    joins the process group, and invokes ``fn(rank, devRank, size)``.

    Args:
        rank: global rank of this process in the job.
        devRank: local device index forwarded to *fn*.
        host: hostname of this process (currently unused; kept for
            signature compatibility with existing callers).
        master_ip: address of the rank-0 rendezvous host.
        size: total number of processes (world size).
        fn: callback executed after initialization.
        backend: torch.distributed backend name (default 'nccl').
        master_port: rendezvous TCP port (default '29500'); previously
            hard-coded, now overridable for multi-job hosts.
    """
    os.environ['MASTER_ADDR'] = str(master_ip)
    os.environ['MASTER_PORT'] = str(master_port)
    dist.init_process_group(backend, rank=rank, world_size=size)
    fn(rank, devRank, size)


def init(rank=-1, size=-1):
    """Discover MPI-launched rank/size from the environment and run the test.

    Reads the rank/size variables exported by common MPI launchers
    (Intel MPI / PMI, Open MPI, MVAPICH2), records them in module-level
    globals, prints a summary line, and kicks off the all_reduce sample
    via init_process.

    NOTE(review): the `rank` and `size` parameters are accepted but
    ignored — the environment always wins; kept for interface
    compatibility.  `myreq` is declared global but never assigned here.
    """
    global myreq
    global my_rank
    global my_size
    global my_local_rank
    global my_local_size

    import argparse
    parser = argparse.ArgumentParser(
        description="MPI functionality testing.."
    )
    parser.add_argument("--master-ip", type=str, default='127.0.0.1')
    args = parser.parse_args()

    # Works for MAST, IMPI, OMPI and MVAPICH2
    size = env2int(
        ["PMI_SIZE", "OMPI_COMM_WORLD_SIZE", "MV2_COMM_WORLD_SIZE"], 1
    )

    global_rank = env2int(
        ["PMI_RANK", "OMPI_COMM_WORLD_RANK", "MV2_COMM_WORLD_RANK"], 0
    )

    my_local_rank = env2int(
        [
            "MPI_LOCALRANKID",
            "OMPI_COMM_WORLD_LOCAL_RANK",
            "MV2_COMM_WORLD_LOCAL_RANK",
        ],
        0,
    )

    my_local_size = env2int(
        [
            "MPI_LOCALNRANKS",
            "OMPI_COMM_WORLD_LOCAL_SIZE",
            "MV2_COMM_WORLD_LOCAL_SIZE",
        ],
        1,
    )

    universe_size = env2int(["OMPI_UNIVERSE_SIZE"], 0)  # currently unused; kept for debugging
    # Default fixed from 9 to 0: a single un-launched process is node rank 0,
    # consistent with the other rank defaults above.
    node_rank = env2int(["OMPI_COMM_WORLD_NODE_RANK"], 0)

    # Publish the discovered topology in the declared globals (previously
    # my_rank/my_size were declared global but never assigned).
    my_rank = global_rank
    my_size = size

    myhost = os.uname()[1]
    print("\t size: %d global_rank: %d node_rank: %d my_local_size: %d my_local_rank: %d running on host: %s " % (
        size, global_rank, node_rank, my_local_size, my_local_rank, myhost))
    init_process(global_rank, my_local_rank, myhost, args.master_ip, size, sample_allred)


# Script entry point: expected to be launched once per rank by an MPI
# launcher (mpirun/srun) so the env-var discovery in init() finds rank/size.
if __name__ == "__main__":
    init()