import sys

# sys.path.insert(0, "../../python")
sys.path.insert(0, '/home/xiaomin/wxm/mxnet/python')

import mxnet as mx
import logging
import numpy as np
from mxnet.metric import EvalMetric
import cv2 as cv
from mxnet import ndarray
import numpy
from sklearn.metrics import roc_auc_score


class CompositeEvalMetric(EvalMetric):
    """Manage multiple child evaluation metrics as a single metric.

    Children may be supplied at construction time via the ``metrics``
    keyword argument, or added later with :meth:`add`.
    """

    def __init__(self, **kwargs):
        super(CompositeEvalMetric, self).__init__('composite')
        # Optional ``metrics`` kwarg; default to an empty child list.
        self.metrics = kwargs.get('metrics', [])

    def add(self, metric):
        """
        Add a child metric
        :param metric: Evaluation metric
        """
        self.metrics.append(metric)

    def get_metric(self, index):
        """
        Get a child metric
        :param index: Dst metric index
        :return: the child metric at ``index``
        :raises ValueError: if ``index`` is out of range
        """
        try:
            return self.metrics[index]
        except IndexError:
            # BUG FIX: the original *returned* the ValueError instead of
            # raising it, handing callers an exception object as a metric.
            raise ValueError("Metric index {} is out of range 0 and {}".format(
                index, len(self.metrics)))

    def update(self, labels, preds):
        """Forward one batch of labels/predictions to every child metric."""
        for metric in self.metrics:
            metric.update(labels, preds)

    def reset(self):
        """Reset all children; tolerate children lacking reset state."""
        try:
            for metric in self.metrics:
                metric.reset()
        except AttributeError:
            pass

    def get(self):
        """Return ``(names, values)`` collected from all child metrics."""
        names = []
        results = []
        for metric in self.metrics:
            result = metric.get()
            names.append(result[0])
            results.append(result[1])
        return names, results

    def print_log(self):
        """Log every child metric as ``name: value`` on one INFO line."""
        names, results = self.get()
        logging.info('; '.join(['{}: {}'.format(name, val) for name, val in zip(names, results)]))


def check_label_shapes(labels, preds, shape=0):
    """Verify that labels and predictions agree in size.

    With ``shape == 0`` the comparison uses ``len()``; otherwise the full
    ``.shape`` attributes are compared.  Raises ``ValueError`` on mismatch,
    returns ``None`` otherwise.
    """
    if shape == 0:
        label_shape = len(labels)
        pred_shape = len(preds)
    else:
        label_shape = labels.shape
        pred_shape = preds.shape

    if label_shape == pred_shape:
        return
    raise ValueError("Shape of labels {} does not match shape of "
                     "predictions {}".format(label_shape, pred_shape))


class AccWithIgnoreMetric(EvalMetric):
    """Pixel accuracy averaged over a sliding window of recent updates.

    Pixels whose ground truth equals ``ignore_label`` are excluded from the
    denominator.  At most ``_iter_size`` recent updates contribute.
    """

    def __init__(self, ignore_label, name=None):
        # BUG FIX: the ``name`` parameter was accepted but ignored; honor it
        # while keeping the historical default 'Accuracy'.
        super(AccWithIgnoreMetric, self).__init__(
            name=name if name is not None else 'Accuracy')
        self._ignore_label = ignore_label
        self._iter_size = 200      # sliding-window length, in updates
        self._nomin_buffer = []    # per-update correct-pixel counts
        self._denom_buffer = []    # per-update valid (non-ignored) pixel counts

    def update(self, labels, preds):
        check_label_shapes(labels, preds)
        for i in range(len(labels)):
            pred_label = mx.ndarray.argmax_channel(preds[i]).asnumpy().astype('int32')
            label = labels[i].asnumpy().astype('int32')

            check_label_shapes(label, pred_label)

            # BUG FIX: once the window was full the original popped the oldest
            # entry but the ``else`` prevented appending the new one, silently
            # dropping new samples.  A sliding window must evict the oldest
            # AND append the newest on every update.
            if len(self._nomin_buffer) >= self._iter_size:
                self._nomin_buffer.pop(0)
                self._denom_buffer.pop(0)
            self._nomin_buffer.append((pred_label.flat == label.flat).sum())
            self._denom_buffer.append(
                len(pred_label.flat) - (label.flat == self._ignore_label).sum())

            # NOTE(review): the numerator still counts matches on ignored
            # pixels while the denominator excludes them -- confirm intended.
            self.sum_metric = np.sum(self._nomin_buffer) * 1.0
            self.num_inst = np.sum(self._denom_buffer)


class F1(EvalMetric):
    """F1 score (harmonic mean of precision and recall) for binary tasks."""

    def __init__(self):
        super(F1, self).__init__('f1')

    def update(self, labels, preds):
        check_label_shapes(labels, preds)

        for label, pred in zip(labels, preds):
            pred = pred.asnumpy()
            label = label.asnumpy().astype('int32')
            pred_label = numpy.argmax(pred, axis=1)

            check_label_shapes(label, pred)
            if len(numpy.unique(label)) > 2:
                raise ValueError("F1 currently only supports binary classification.")

            tp = fp = fn = 0.

            for y_pred, y_true in zip(pred_label, label):
                # NOTE(review): if ``label`` is 1-D (plain class indices),
                # np.argmax of a scalar is always 0, so y_true collapses to 0.
                # This only behaves sensibly for one-hot 2-D labels -- confirm.
                y_true = np.argmax(y_true)
                if y_pred == 1:
                    if y_true == 1:
                        tp += 1.
                    elif y_true == 0:
                        fp += 1.
                elif y_pred == 0 and y_true == 1:
                    fn += 1.

            precision = tp / (tp + fp) if tp + fp > 0 else 0.
            recall = tp / (tp + fn) if tp + fn > 0 else 0.
            if precision + recall > 0:
                f1_score = 2 * precision * recall / (precision + recall)
            else:
                f1_score = 0.

            self.sum_metric += f1_score
            self.num_inst += 1


class Accuracy(EvalMetric):
    """Classification accuracy for one-hot encoded labels."""

    def __init__(self):
        super(Accuracy, self).__init__('accuracy')

    def update(self, labels, preds):
        """Accumulate the count of correctly predicted samples.

        Labels are assumed one-hot encoded, (batch, num_classes) -- the same
        convention the F1 metric in this module follows.  TODO confirm.
        """
        check_label_shapes(labels, preds)
        for label, pred_label in zip(labels, preds):
            pred_label = mx.ndarray.argmax_channel(pred_label).asnumpy().astype('int32')
            label = label.asnumpy().astype('int32')

            check_label_shapes(label, pred_label)
            # BUG FIX: ``numpy.argmax(label)`` without an axis collapses the
            # whole batch to a single numpy scalar, which has no ``.flat``
            # attribute and crashed the comparison below.  Take the argmax
            # per sample instead.
            label = numpy.argmax(label, axis=1)

            self.sum_metric += (pred_label.flat == label.flat).sum()
            self.num_inst += len(pred_label.flat)

class AUC(EvalMetric):
    """Macro-averaged area under the ROC curve, via scikit-learn."""

    def __init__(self):
        super(AUC, self).__init__('AUC')

    def update(self, labels, preds):
        """Add each batch's ROC-AUC score; one batch counts as one instance."""
        check_label_shapes(labels, preds)
        for label, pred in zip(labels, preds):
            y_score = pred.asnumpy()
            y_true = label.asnumpy().astype('int32')

            score = roc_auc_score(y_true, y_score, average='macro')

            self.sum_metric += score
            self.num_inst += 1


class IoUMetric(EvalMetric):
    """Mean intersection-over-union over a sliding window of batches.

    Per batch, IoU is averaged over the classes that actually occur in the
    ground truth; classes absent from the batch are skipped.
    """

    def __init__(self, ignore_label, label_num, name=None):
        # BUG FIX: the ``name`` parameter was accepted but ignored; honor it
        # while keeping the historical default 'IoU'.
        super(IoUMetric, self).__init__(name=name if name is not None else 'IoU')
        # NOTE(review): ``ignore_label`` is stored but never used in the IoU
        # computation below -- confirm intended.
        self._ignore_label = ignore_label
        self._label_num = label_num
        self._iter_size = 200    # sliding-window length, in updates
        self._iou_buffer = []    # per-update mean-IoU values

    def update(self, labels, preds):
        check_label_shapes(labels, preds)
        for i in range(len(labels)):
            pred_label = mx.ndarray.argmax_channel(preds[i]).asnumpy().astype('int32')
            label = labels[i].asnumpy().astype('int32')

            check_label_shapes(label, pred_label)

            iou = 0
            skip_label_num = 0
            for j in range(self._label_num):
                pred_cur = (pred_label.flat == j)
                gt_cur = (label.flat == j)
                tp = np.logical_and(pred_cur, gt_cur).sum()
                denom = np.logical_or(pred_cur, gt_cur).sum()
                assert tp <= denom
                if gt_cur.sum():
                    # The class occurs in the ground truth, so denom > 0 and
                    # the division is safe.
                    iou += tp * 1.0 / denom
                else:
                    # Class absent from the ground truth: exclude from mean.
                    skip_label_num += 1
            # Epsilon keeps the division defined if every class was skipped.
            iou /= (self._label_num - skip_label_num+0.0001)

            # BUG FIX: once the window was full the original popped the oldest
            # entry but the ``else`` prevented appending the new IoU, silently
            # dropping new batches.  Evict the oldest AND append the newest.
            if len(self._iou_buffer) >= self._iter_size:
                self._iou_buffer.pop(0)
            self._iou_buffer.append(iou)

            self.sum_metric = np.mean(self._iou_buffer)
            self.num_inst = 1


class SoftmaxLoss(EvalMetric):
    """Cross-entropy (softmax) loss averaged over a sliding window of updates."""

    def __init__(self, ignore_label, label_num, name=None):
        # NOTE(review): ``ignore_label`` and ``name`` are accepted but never
        # used below -- confirm intended.
        super(SoftmaxLoss, self).__init__(name="SoftmaxLoss")
        self._ignore_label = ignore_label
        self._label_num = label_num
        # Sliding-window length, in updates.
        self._iter_size = 200
        # Per-update mean cross-entropy values.
        self._loss_buffer = []

    def update(self, labels, preds):
        """Fold this batch's mean cross-entropy into the sliding window.

        Assumes ``preds`` are per-class probabilities shaped (N, C) or
        (N, C, H, W) and ``labels`` hold integer class indices per (spatial)
        position -- TODO confirm against callers.
        """
        check_label_shapes(labels, preds)

        loss = 0.0
        cnt = 0.0
        eps = 1e-6  # added to probabilities so log(0) cannot occur
        for i in range(len(labels)):
            prediction = preds[i].asnumpy()[:]
            prediction += eps
            shape = prediction.shape
            if len(shape) == 4:
                # Flatten spatial dims: (N, C, H, W) -> (N, C, H*W).
                shape = (shape[0], shape[1], shape[2]*shape[3])
                prediction = prediction.reshape(shape)
            label = labels[i].asnumpy()
            # Build a one-hot mask with the same layout as ``prediction``.
            soft_label = np.zeros(prediction.shape)
            for b in range(soft_label.shape[0]):
                for c in range(self._label_num):
                    soft_label[b][c][label[b] == c] = 1.0

            # Sum -log(p) over positions selected by the one-hot mask.
            loss += (-np.log(prediction[soft_label==1])).sum()
            cnt += prediction[soft_label==1].size

        if len(self._loss_buffer) >= self._iter_size:
            self._loss_buffer.pop(0)
        if cnt == 0:
            # No valid positions this batch: carry the window's current mean.
            # NOTE(review): np.mean([]) yields NaN if this happens on the very
            # first update -- confirm that case cannot occur.
            self._loss_buffer.append(np.mean(self._loss_buffer))
        else:
            self._loss_buffer.append(loss/cnt)

        self.sum_metric = np.mean(self._loss_buffer)
        self.num_inst = 1
