import os

from mindspore import nn
from misc.metric_tool import ConfuseMatrixMeter
import mindspore as ms
from mindspore import ops
from mindspore.train.callback import Callback
import stat
from mindspore import log as logger
from mindspore import save_checkpoint


class ChangeFormerEval(nn.Cell):
    """Inference wrapper that turns the network's raw outputs into a
    class-index prediction map.

    Args:
        net (nn.Cell): change-detection network. It is expected to return a
            sequence of logit tensors; only the last element is used.
        eval_activate (str): kept for interface compatibility with existing
            callers; only the 'argmax' activation is implemented here.
    """

    def __init__(self, net, eval_activate='argmax'):
        super(ChangeFormerEval, self).__init__()
        self.net = net
        # NOTE(review): the original also created Transpose/Squeeze/Softmax
        # ops that were never used in construct(); they have been removed.
        # Argmax over axis 1 turns per-class logits into class labels
        # (assumes NCHW layout with classes on axis 1 — TODO confirm).
        self.argmax = ops.Argmax(axis=1)

    def construct(self, imgAB):
        """Run the network and return the argmax prediction of its last output."""
        outputs = self.net(imgAB)
        # The network returns several outputs (e.g. auxiliary heads); the
        # final one is the prediction used for evaluation.
        pred = outputs[-1]
        pred = self.argmax(pred)
        return pred


class TempLoss(nn.Cell):
    """Placeholder loss that forwards the logits unchanged.

    Lets a ``Model`` be constructed for evaluation without a real training
    loss; the ``label`` argument is accepted but ignored.
    """

    def __init__(self):
        super(TempLoss, self).__init__()
        self.identity = ops.Identity()

    def construct(self, logits, label):
        """Return ``logits`` untouched; ``label`` is deliberately unused."""
        passthrough = self.identity(logits)
        return passthrough


def apply_eval(eval_param_dict):
    """Run one round of evaluation and return the CF_metric score.

    Args:
        eval_param_dict (dict): must contain "model" (an object whose
            ``eval(dataset, dataset_sink_mode=...)`` returns a dict with a
            "CF_metric" entry) and "dataset" (the evaluation dataset).

    Returns:
        The value stored under "CF_metric" in the model's eval result.
    """
    model = eval_param_dict["model"]
    dataset = eval_param_dict["dataset"]
    # NOTE(review): the original also read "metrics_name" from the dict but
    # never used it; the dead local has been removed (callers may still pass
    # the key — it is simply ignored).
    eval_score = model.eval(dataset, dataset_sink_mode=False)["CF_metric"]
    return eval_score


class CF_metric(nn.Metric):
    """Confusion-matrix based change-detection metric.

    Accumulates a confusion matrix over ``update()`` calls and reports the
    'iou_1' score (presumably the IoU of the "change" class — confirm
    against ConfuseMatrixMeter) from ``eval()``.
    """

    def __init__(self, num_class):
        super(CF_metric, self).__init__()
        # Stored so clear() can rebuild a fresh meter for each eval round.
        self.num_class = num_class
        self.clear()

    def clear(self):
        # Recreate the meter so results do not leak between evaluation
        # rounds. (The original only reset the cached score dict, letting
        # the confusion matrix keep accumulating across epochs.)
        self.running_metric = ConfuseMatrixMeter(n_class=self.num_class)
        self.score_dict = None  # records all computed scores

    def update(self, *inputs):
        """Accumulate one batch: inputs are (prediction, label) tensors."""
        pred = inputs[0]
        label = inputs[1]
        self.running_metric.update_cm(pred.asnumpy(), gt=label.asnumpy())

    def eval(self):
        """Compute and print all scores; return the 'iou_1' entry."""
        self.score_dict = self.running_metric.get_scores()
        print("==================Evaluation===================")
        print(self.score_dict)
        iou_1 = self.score_dict['iou_1']
        return iou_1

class EvalCallBack(Callback):
    """Training callback that periodically evaluates the model and keeps
    the checkpoint with the best metric value.

    Args:
        eval_function (callable): called with ``eval_param_dict``; must
            return a scalar score (higher is better).
        eval_param_dict (dict): arguments forwarded to ``eval_function``.
        interval (int): evaluate every ``interval`` epochs; must be >= 1.
        eval_start_epoch (int): first epoch at which evaluation runs.
        save_best_ckpt (bool): whether to save the best checkpoint to disk.
        ckpt_directory (str): directory holding the best checkpoint
            (created if missing).
        besk_ckpt_name (str): checkpoint file name. The misspelled
            parameter name is kept for backward compatibility with
            keyword-argument callers.
        metrics_name (str): label used when printing the metric.
    """

    def __init__(self, eval_function, eval_param_dict, interval=1, eval_start_epoch=1, save_best_ckpt=True,
                 ckpt_directory="./", besk_ckpt_name="best.ckpt", metrics_name="iou"):
        super(EvalCallBack, self).__init__()
        self.eval_param_dict = eval_param_dict
        self.eval_function = eval_function
        self.eval_start_epoch = eval_start_epoch
        if interval < 1:
            raise ValueError("interval should >= 1.")
        self.interval = interval
        self.save_best_ckpt = save_best_ckpt
        self.best_res = 0
        self.best_epoch = 0
        if not os.path.isdir(ckpt_directory):
            os.makedirs(ckpt_directory)
        # Internal attribute renamed from the misspelled "bast_ckpt_path".
        self.best_ckpt_path = os.path.join(ckpt_directory, besk_ckpt_name)
        self.metrics_name = metrics_name

    def remove_ckpoint_file(self, file_name):
        """Delete the given checkpoint file, logging (never raising) on failure."""
        try:
            # Clear a possible read-only bit before deleting.
            os.chmod(file_name, stat.S_IWRITE)
            os.remove(file_name)
        except OSError:
            logger.warning("OSError, failed to remove the older ckpt file %s.", file_name)
        except ValueError:
            logger.warning("ValueError, failed to remove the older ckpt file %s.", file_name)

    def epoch_end(self, run_context):
        """Run evaluation on schedule; refresh the best checkpoint on improvement."""
        cb_params = run_context.original_args()
        cur_epoch = cb_params.cur_epoch_num
        if cur_epoch >= self.eval_start_epoch and (cur_epoch - self.eval_start_epoch) % self.interval == 0:
            res = self.eval_function(self.eval_param_dict)
            print("epoch: {}, {}: {}".format(cur_epoch, self.metrics_name, res), flush=True)
            # ">=" so a tie still refreshes the checkpoint with the newer weights.
            if res >= self.best_res:
                self.best_res = res
                self.best_epoch = cur_epoch
                print("update best result: {}".format(res), flush=True)
                if self.save_best_ckpt:
                    if os.path.exists(self.best_ckpt_path):
                        self.remove_ckpoint_file(self.best_ckpt_path)
                    save_checkpoint(cb_params.train_network, self.best_ckpt_path)
                    print("update best checkpoint at: {}".format(self.best_ckpt_path), flush=True)

    def end(self, run_context):
        """Print a summary of the best result when training finishes."""
        print("End training, the best {0} is: {1}, the best {0} epoch is {2}".format(self.metrics_name,
                                                                                     self.best_res,
                                                                                     self.best_epoch), flush=True)

