import torch

from ignite.metrics import Metric

from ignite.exceptions import NotComputableError
from ignite.metrics.metric import sync_all_reduce, reinit__is_reduced

class AvgL2(Metric):
    """Ignite metric that accumulates per-sample values and computes their mean.

    ``update`` receives a tuple whose first element is a tensor of values
    (any shape, including 0-dim) or ``None`` to skip the batch; ``compute``
    returns the running average over every accumulated element.
    """

    def __init__(self, output_transform=lambda x: x, device="cpu"):
        # Running totals; Metric.__init__ calls reset(), which re-zeroes them.
        self._num_sum = 0
        self._num_examples = 0
        super(AvgL2, self).__init__(output_transform=output_transform, device=device)

    @reinit__is_reduced
    def reset(self):
        """Clear accumulated state (called at the start of each run/epoch)."""
        self._num_sum = 0
        self._num_examples = 0
        super().reset()

    @reinit__is_reduced
    def update(self, output):
        """Accumulate one batch.

        Args:
            output: tuple whose first element is a tensor of values, or
                ``None`` to skip this batch entirely.
        """
        if output[0] is None:
            return
        # reshape(-1) handles 0-dim (scalar) tensors: .tolist() on those
        # returns a bare float, which would break sum()/len().
        values = output[0].detach().reshape(-1)

        self._num_sum += values.sum().item()
        self._num_examples += values.numel()

    @sync_all_reduce("_num_examples", "_num_sum:SUM")
    def compute(self):
        """Return the mean of all accumulated values.

        Raises:
            NotComputableError: if update() never contributed any sample.
        """
        if self._num_examples == 0:
            raise NotComputableError("AvgL2 has 0 sample!")

        return self._num_sum / self._num_examples


if __name__ == "__main__":
    # Smoke test: feed each row of a random matrix into the metric; the
    # computed average must match the matrix-wide mean.
    metric = AvgL2()

    samples = torch.randint(6, (4, 10)).float()
    print(samples)

    for row in samples:
        metric.update((row, None))

    avg = metric.compute()
    print(avg, samples.mean())


