import torch

from ignite.metrics import Metric

from ignite.exceptions import NotComputableError
from ignite.metrics.metric import sync_all_reduce, reinit__is_reduced

class CMR(Metric):
    """Cumulative Match Rate over a set of distance thresholds.

    For each threshold ``t`` in ``thresh``, :meth:`compute` reports the
    fraction of accumulated samples whose distance is ``<= t``.
    Counters are accumulated across :meth:`update` calls and reduced
    across processes via ignite's ``sync_all_reduce``.
    """

    def __init__(self, output_transform=lambda x: x, device="cpu", thresh=(1, 2, 5)):
        """
        Args:
            output_transform: callable applied by ignite to the engine's
                output before ``update`` receives it (ignite convention).
            device: device passed through to the ignite ``Metric`` base.
            thresh: iterable of distance thresholds to evaluate the match
                rate at. Defaults to ``(1, 2, 5)``.
        """
        # Copy into a private list so a caller-supplied sequence cannot be
        # mutated from outside after construction, and so the default is
        # never shared mutable state (the old `[1, 2, 5]` default was a
        # classic mutable-default-argument hazard).
        self._thresh = list(thresh)
        self._num_correct = None
        self._num_examples = None
        super().__init__(output_transform=output_transform, device=device)

    @reinit__is_reduced
    def reset(self):
        """Clear accumulated state: one correct-counter per threshold."""
        self._num_correct = [0] * len(self._thresh)
        self._num_examples = 0
        super().reset()

    @reinit__is_reduced
    def update(self, output):
        """Accumulate counts from ``output = (distances, _)``.

        ``output[0]`` is expected to be a tensor of distances (anything
        else in ``output`` is ignored). A ``None`` first element is
        silently skipped — treated as "no samples this step".
        """
        if output[0] is None:
            return
        # Flatten so 0-dim and multi-dim tensors are handled uniformly;
        # for the usual 1-D input this reshape is a no-op.
        distances = output[0].detach().reshape(-1).tolist()

        for i, t in enumerate(self._thresh):
            # Count samples matching within threshold t.
            self._num_correct[i] += sum(1 for d in distances if d <= t)

        self._num_examples += len(distances)

    @sync_all_reduce("_num_examples", "_num_correct:SUM")
    def compute(self):
        """Return a flat list ``[str(t1), rate1, str(t2), rate2, ...]``.

        Raises:
            NotComputableError: if no samples have been accumulated.
        """
        if self._num_examples == 0:
            raise NotComputableError("CMR has 0 sample!")
        rates = [c / self._num_examples for c in self._num_correct]

        # Interleave threshold labels with their rates, preserving the
        # original flat-list output format relied on by callers.
        result = []
        for t, r in zip(self._thresh, rates):
            result.append(str(t))
            result.append(r)

        return result


if __name__ == "__main__":
    # Smoke test: feed random integer "distances" through the metric
    # row by row and print the resulting per-threshold match rates.
    metric = CMR()

    distances = torch.randint(6, (4, 10)).float()
    print(distances)

    for row in distances:
        metric.update((row, None))

    print(metric.compute())


