# Copyright (c) OpenMMLab. All rights reserved.
from collections import OrderedDict
from typing import List, Sequence

import numpy as np
import torch
from mmengine.evaluator import BaseMetric
from mmengine.logging import MMLogger, print_log
from prettytable import PrettyTable
from mmseg.registry import METRICS


@METRICS.register_module()
class MultiHeadSegMetric(BaseMetric):
    """Evaluator for multi-head segmentation; metrics are computed per head.

    Each head has its own GT field, prediction field and name prefix. The
    confusion statistics are accumulated per head in :meth:`process` and
    reduced to metrics in :meth:`compute_metrics`.

    NOTE(review): statistics are accumulated on ``self`` instead of
    ``self.results``, so they are NOT gathered across ranks by mmengine —
    this metric is only correct for single-process evaluation. Confirm
    before using it with distributed validation.

    Args:
        iou_metrics (list): Metric types to compute, among
            ['mIoU', 'mDice', 'mAcc']. Defaults to ['mDice'].
        ignore_index (int): Label id to ignore. Defaults to 255 so that the
            background class (0) is counted.
        label_keys (list): Field names of the GT seg maps inside each
            SegDataSample, one per head.
        pred_keys (list): Field names of the predictions inside each
            SegDataSample, one per head.
        prefixes (list): Name prefix of each head, used in metric keys and
            logged tables.
        print_background (bool): Whether to include the background class
            (index 0) in the summary and per-class results.
            Defaults to False.
    """

    def __init__(self,
                 iou_metrics: List[str] = None,
                 ignore_index: int = 255,
                 label_keys: List[str] = None,
                 pred_keys: List[str] = None,
                 prefixes: List[str] = None,
                 print_background: bool = False) -> None:
        super().__init__()
        # Fix: no mutable default arguments — resolve the documented
        # defaults here instead of sharing list objects across instances.
        self.iou_metrics = ['mDice'] if iou_metrics is None \
            else list(iou_metrics)
        self.ignore_index = ignore_index
        self.label_keys = ['gt_seg_map_lcx', 'gt_seg_map_lad'] \
            if label_keys is None else list(label_keys)
        self.pred_keys = ['pred_seg_map_head0', 'pred_seg_map_head1'] \
            if pred_keys is None else list(pred_keys)
        self.prefixes = ['lcx', 'lad'] if prefixes is None \
            else list(prefixes)
        self.print_background = print_background

        assert len(self.label_keys) == len(self.pred_keys) == \
            len(self.prefixes), \
            'label_keys, pred_keys, prefixes 长度必须一致'

        # Per-head running sums of the confusion statistics.
        self._reset_accumulators()

    def _reset_accumulators(self) -> None:
        """(Re-)initialize the per-head accumulators to empty tensors."""
        self.total_area_intersect = {
            p: torch.zeros(0) for p in self.prefixes}
        self.total_area_union = {
            p: torch.zeros(0) for p in self.prefixes}
        self.total_area_pred_label = {
            p: torch.zeros(0) for p in self.prefixes}
        self.total_area_label = {
            p: torch.zeros(0) for p in self.prefixes}

    def process(self, data_batch, data_samples: Sequence) -> None:
        """Accumulate per-head confusion statistics for one batch.

        Samples that lack either the GT field or the prediction field of a
        head are silently skipped for that head.
        """
        num_classes = len(self.dataset_meta['classes'])
        for label_key, pred_key, prefix in zip(
                self.label_keys, self.pred_keys, self.prefixes):

            for sample in data_samples:
                if label_key not in sample or pred_key not in sample:
                    continue

                # NOTE(review): the label is accessed as a mapping
                # (sample[label_key]['data']) while the prediction is
                # accessed as an attribute (sample[pred_key].data) —
                # presumably both are PixelData-like; confirm against the
                # model's postprocessing code.
                label = sample[label_key]['data'].squeeze().to('cpu')
                pred = sample[pred_key].data.squeeze().to('cpu')

                area_intersect, area_union, area_pred_label, area_label = \
                    self.intersect_and_union(pred, label, num_classes,
                                             self.ignore_index)

                # Adopt the histogram shape lazily on the first sample for
                # this head, then accumulate in place.
                if self.total_area_intersect[prefix].numel() == 0:
                    self.total_area_intersect[prefix] = area_intersect
                    self.total_area_union[prefix] = area_union
                    self.total_area_pred_label[prefix] = area_pred_label
                    self.total_area_label[prefix] = area_label
                else:
                    self.total_area_intersect[prefix] += area_intersect
                    self.total_area_union[prefix] += area_union
                    self.total_area_pred_label[prefix] += area_pred_label
                    self.total_area_label[prefix] += area_label

    def compute_metrics(self, results) -> OrderedDict:
        """Reduce the accumulated statistics of every head to metrics.

        Args:
            results: Unused; statistics are kept on ``self`` (see the class
                docstring note).

        Returns:
            OrderedDict: ``{prefix}_{metric}`` -> float for every head.
        """
        logger: MMLogger = MMLogger.get_current_instance()
        metrics = OrderedDict()

        class_names = self.dataset_meta['classes']

        def _to_np(t):
            # Accumulators are tensors in normal operation; be tolerant of
            # plain sequences as the original code was.
            return t.numpy() if isinstance(t, torch.Tensor) else np.array(t)

        for prefix in self.prefixes:
            intersect = _to_np(self.total_area_intersect[prefix])
            union = _to_np(self.total_area_union[prefix])
            pred_hist = _to_np(self.total_area_pred_label[prefix])
            label_hist = _to_np(self.total_area_label[prefix])

            # Fix: a head that never received a sample has empty
            # accumulators; np.nanmean over an empty slice would warn and
            # yield NaN, and the per-class table would be malformed.
            if intersect.size == 0:
                print_log(
                    f'No samples collected for head {prefix}; skipped.',
                    logger=logger)
                continue

            ret_metrics, ret_metrics_class = self.total_area_to_metrics(
                intersect, union, pred_hist, label_hist,
                metrics=self.iou_metrics,
                ignore_index=self.ignore_index,
                print_background=self.print_background)

            # -------- summary (per-head means) --------
            for key, value in ret_metrics.items():
                metrics[f'{prefix}_{key}'] = value

            # -------- per-class table --------
            class_table = PrettyTable()

            if self.print_background:
                valid_class_names = class_names
                values_slice = slice(None)
            else:
                valid_class_names = class_names[1:]  # skip background
                values_slice = slice(1, None)

            class_table.add_column('Class', valid_class_names)

            for key, value in ret_metrics_class.items():
                values = np.round(value[values_slice] * 100, 2).tolist()
                if len(values) != len(valid_class_names):
                    # Shape mismatch (should not happen): keep the table
                    # well-formed rather than crash.
                    values = ['-'] * len(valid_class_names)
                class_table.add_column(key, values)

            print_log(f'Per-class results for {prefix}:', logger)
            print_log('\n' + class_table.get_string(), logger=logger)

        print_log(f'MultiHead summary metrics: {metrics}', logger=logger)
        # Fix: reset so that a later evaluation round starts from zero
        # instead of accumulating on top of this one (the framework clears
        # self.results, but it cannot see these custom accumulators).
        self._reset_accumulators()
        return metrics

    @staticmethod
    def intersect_and_union(pred_label: torch.Tensor,
                            label: torch.Tensor,
                            num_classes: int,
                            ignore_index: int):
        """Compute per-class area histograms for one sample.

        Args:
            pred_label (torch.Tensor): Predicted label map.
            label (torch.Tensor): Ground-truth label map, same shape.
            num_classes (int): Number of classes (histogram bins).
            ignore_index (int): Label value excluded from all statistics.

        Returns:
            tuple[torch.Tensor]: ``(area_intersect, area_union,
            area_pred_label, area_label)``, each of length ``num_classes``.
        """
        # Drop every pixel whose GT equals ignore_index (from both maps).
        mask = (label != ignore_index)
        pred_label = pred_label[mask]
        label = label[mask]

        # Pixels predicted correctly, histogrammed per class.
        intersect = pred_label[pred_label == label]
        area_intersect = torch.histc(
            intersect.float(), bins=num_classes, min=0,
            max=num_classes - 1).cpu()
        area_pred_label = torch.histc(
            pred_label.float(), bins=num_classes, min=0,
            max=num_classes - 1).cpu()
        area_label = torch.histc(
            label.float(), bins=num_classes, min=0,
            max=num_classes - 1).cpu()
        area_union = area_pred_label + area_label - area_intersect
        return area_intersect, area_union, area_pred_label, area_label

    @staticmethod
    def total_area_to_metrics(total_area_intersect,
                              total_area_union,
                              total_area_pred_label,
                              total_area_label,
                              metrics=('mDice', ),
                              ignore_index=255,
                              print_background=False):
        """Convert accumulated per-class areas into final metrics.

        Args:
            total_area_intersect (np.ndarray): Per-class intersection areas.
            total_area_union (np.ndarray): Per-class union areas.
            total_area_pred_label (np.ndarray): Per-class prediction areas.
            total_area_label (np.ndarray): Per-class GT areas.
            metrics (Sequence[str]): Which of 'mIoU'/'mDice' to compute
                ('mAcc' is emitted alongside either). Defaults to
                ``('mDice',)`` (immutable default — fix for the original
                mutable-list default).
            ignore_index (int): Unused here; kept for backward
                compatibility with existing callers.
            print_background (bool): Whether the background class (index 0)
                participates in the summary means.

        Returns:
            tuple[OrderedDict, OrderedDict]: summary scalars and per-class
            metric arrays.
        """
        ret_metrics = OrderedDict()
        ret_metrics_class = OrderedDict()

        # ---------- classes taking part in the summary means ----------
        if print_background:
            valid = slice(None)     # all classes
        else:
            valid = slice(1, None)  # skip background

        # Overall pixel accuracy over ALL classes (unaffected by
        # print_background); epsilon avoids division by zero.
        all_acc = total_area_intersect.sum() / (total_area_label.sum() + 1e-10)
        ret_metrics['aAcc'] = float(all_acc)

        # Per-class accuracy is shared by both metric families; compute it
        # once (the original computed it twice, identically).
        acc = total_area_intersect / (total_area_label + 1e-10)

        if 'mIoU' in metrics:
            iou = total_area_intersect / (total_area_union + 1e-10)
            ret_metrics['mIoU'] = float(np.nanmean(iou[valid]))
            ret_metrics['mAcc'] = float(np.nanmean(acc[valid]))
            ret_metrics_class['IoU'] = iou
            ret_metrics_class['Acc'] = acc

        if 'mDice' in metrics:
            dice = 2 * total_area_intersect / (
                total_area_pred_label + total_area_label + 1e-10)
            ret_metrics['mDice'] = float(np.nanmean(dice[valid]))
            ret_metrics['mAcc'] = float(np.nanmean(acc[valid]))
            ret_metrics_class['Dice'] = dice
            ret_metrics_class['Acc'] = acc

        return ret_metrics, ret_metrics_class
