from copy import deepcopy

import numpy as np
import torch
from numpy.typing import ArrayLike

from .distributed import Group, LOCAL
from .metric import Metric


class SumMetric(Metric[torch.Tensor | ArrayLike | None, torch.Tensor | ArrayLike]):
    """Metric that accumulates a running elementwise sum of tensor/array patches.

    Each `update` adds a patch into the running total; when a distributed
    process group is active (and the metric is not LOCAL-scoped), the patch is
    all-reduced (SUM) across the group before accumulation.
    """

    def __init__(self,
        value: torch.Tensor | ArrayLike | None = None, *,
        group: Group | None = None,
    ):
        """Initialize the accumulator.

        Args:
            value: optional starting value for the sum (None = empty).
            group: distributed group to all-reduce over; LOCAL disables
                the reduction, None means the default process group.
        """
        # Copy tensor/ndarray values so the in-place `+=` in `update`
        # can never mutate the caller's original object.
        self._value = self._detached_copy(value)
        self._group = group

    @staticmethod
    def _detached_copy(value):
        """Return an owned copy of tensor/ndarray values; pass anything else through."""
        if isinstance(value, torch.Tensor):
            return value.clone()
        if isinstance(value, np.ndarray):
            return value.copy()
        return value

    def compute(self) -> torch.Tensor | ArrayLike | None:
        """Return the accumulated sum, or None if nothing has been accumulated."""
        # The original `if self._value is None: return None` branch was
        # redundant — both paths return self._value.
        return self._value

    def update(self, patch: torch.Tensor | ArrayLike):
        """Add `patch` into the running sum.

        The patch is copied first so the reduction and the in-place
        accumulation never mutate the caller's object.
        """
        patch = self._detached_copy(patch)

        if self._group is not LOCAL and torch.distributed.is_initialized():
            # NOTE(review): all_reduce requires a torch.Tensor; a numpy patch
            # reaching this branch would fail — presumably numpy values are
            # only used with LOCAL / non-distributed runs. Confirm.
            torch.distributed.all_reduce(patch, torch.distributed.ReduceOp.SUM, self._group)

        if self._value is None:
            self._value = patch
        else:
            # In-place accumulation is safe: self._value is always an
            # owned copy (see _detached_copy).
            self._value += patch

    def reset(self, value: torch.Tensor | ArrayLike | None = None):
        """Reset the accumulator to `value` (default: None, i.e. empty).

        Copies tensor/ndarray values for the same ownership reason as
        `__init__`.
        """
        self._value = self._detached_copy(value)
