import torch
from ignite.metrics import Metric
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import sync_all_reduce, reinit__is_reduced
from ais.model import LandMarkVertebraDecoder


class AISLandmarkMetric(Metric):
    """Mean Euclidean landmark distance between predicted and ground-truth
    vertebra landmarks, averaged over all examples seen since ``reset()``.

    Predictions are decoded from network output maps by
    ``LandMarkVertebraDecoder`` and rescaled by ``down_ratio`` back to
    input-image coordinates before being compared with ``y['landmark']``.
    Lower is better (it is a distance, not a score).
    """

    def __init__(self, num=2, down_ratio=4, output_transform=lambda x: x, device=None):
        # num: number of detections the decoder keeps per image; each detection
        # contributes 4 corner landmarks, so num * 4 landmarks total per image.
        self.num = num
        self.decoder = LandMarkVertebraDecoder(self.num, conf_thresh=0.2)
        # down_ratio: stride between the network's output maps and the input
        # image; decoded coordinates are multiplied by this to undo it.
        self.down_ratio = down_ratio
        self._total_score = 0
        self._num_examples = 0
        super(AISLandmarkMetric, self).__init__(output_transform=output_transform, device=device)

    @reinit__is_reduced
    def reset(self):
        """Clear the running sum and example count (called per epoch by ignite)."""
        self._total_score = 0
        self._num_examples = 0
        super(AISLandmarkMetric, self).reset()

    @reinit__is_reduced
    def update(self, output):
        """Accumulate the per-image mean landmark distance for one batch.

        ``output`` is ``(y_pred, y)`` where ``y_pred`` is a dict with
        'heatmap', 'corner_offset' and 'center_offset' tensors and
        ``y['landmark']`` holds ground-truth landmarks of shape
        (batch, num * 4, 2) — TODO confirm against the dataset code.
        """
        y_pred, y = output
        decoder_pts = self.decoder.det_decode(
            y_pred['heatmap'], y_pred['corner_offset'], y_pred['center_offset'])

        # Columns 0-9 are coordinates (center + 4 corners) in output-map
        # space; scale them back to input-image resolution.
        try:
            decoder_pts[:, :, :10] *= self.down_ratio
        except Exception as e:
            # Fixed: previously this swallowed the error (and crashed on
            # `y_pred.shape` — y_pred is a dict) and then continued with
            # unscaled points, silently corrupting the metric. Log useful
            # diagnostics and re-raise instead.
            print('error scaling decoded points:', e)
            print('heatmap shape:', y_pred['heatmap'].shape)
            print('decoder_pts shape:', decoder_pts.shape)
            raise

        # Sort detections top-to-bottom by the center y-coordinate, per image.
        sort_ind = torch.argsort(decoder_pts[:, :, 1], dim=1)
        sort_pts = torch.stack(
            [decoder_pts[i][sort_ind[i]] for i in range(decoder_pts.shape[0])],
            dim=0)

        # Columns 2-9 hold the 4 corner points; flatten to (batch, num*4, 2).
        pr_landmarks = sort_pts[:, :, 2:10].reshape((-1, self.num * 4, 2))
        gt_landmarks = y['landmark']

        # Euclidean distance per landmark, then mean over landmarks per image.
        lm_dist = torch.sqrt(
            (pr_landmarks[:, :, 0] - gt_landmarks[:, :, 0]) ** 2
            + (pr_landmarks[:, :, 1] - gt_landmarks[:, :, 1]) ** 2)
        lm_dist_mean = torch.mean(lm_dist, dim=1)

        self._total_score += torch.sum(lm_dist_mean).item()
        self._num_examples += lm_dist_mean.shape[0]

    # Fixed: the second attribute was "self._num_examples" — sync_all_reduce
    # does getattr(self, name), so that literal name never resolved and
    # _num_examples was not reduced across processes in distributed runs.
    @sync_all_reduce("_total_score", "_num_examples")
    def compute(self):
        """Return the dataset-wide mean landmark distance.

        Raises:
            NotComputableError: if ``update`` has not been called since reset.
        """
        if self._num_examples == 0:
            raise NotComputableError("AISLandmarkMetric must have at least one example before it can be computed.")
        return self._total_score / self._num_examples




