import os
from typing import Any, Callable, TypeVar

import torch

T = TypeVar("T")


def torch_distributed_load(
    f: str | os.PathLike, *,
    map_location: Callable[[torch.Tensor, str], torch.Tensor] | torch.device | str | dict[str, str] | None = None,
    process_rank: int = 0,
    process_group_size: int = 1,
    process_rank_src: int = 0,
    process_device: torch.device | str | None = None,
):
    if process_rank == process_rank_src:
        state = torch.load(f, map_location=map_location)
        state_list = [state]
    else:
        state_list = [None]
    if process_group_size > 1:
        torch.distributed.broadcast_object_list(state_list, src=process_rank_src, device=process_device)
    return state_list[0]


def torch_distributed_broadcast_object(
    obj: T,
    src: int = 0,
    group: Any = None,
    device: Any = None
) -> T:
    """Broadcast a single picklable object from rank ``src`` to all ranks.

    Convenience wrapper around ``torch.distributed.broadcast_object_list``
    for the one-object case: every rank returns the value that rank
    ``src`` passed in; the ``obj`` argument supplied on non-source ranks
    is discarded.

    Args:
        obj: Object to send (on ``src``) or a placeholder (elsewhere).
        src: Source rank of the broadcast.
        group: Process group to use; ``None`` means the default group.
        device: Device used for the broadcast transport.

    Returns:
        The broadcast object, on every rank.
    """
    buffer = [obj]
    torch.distributed.broadcast_object_list(buffer, src, group, device)
    return buffer[0]
