# Copyright 2023 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""earth with loss"""
import os
import numpy as np

import mindspore as ms
import mindspore.numpy as msnp
from mindspore import dtype as mstype
from mindspore import ops, nn, Tensor
from mindspore.train.callback import Callback
from mindspore.train.summary import SummaryRecord

from .forecast import WeatherForecast
from .ice_visual import plt_metrics

def _forecast_multi_step(inputs, model, feature_dims, mask_dims, t_out, t_in):
    """Roll the model forward ``t_out`` steps, feeding each prediction back in.

    Each frame is assumed to carry ``feature_dims`` predicted channels followed
    by ``mask_dims`` static mask channels (TODO confirm against the dataset
    layout).  After every step the mask channels are re-attached to the fresh
    prediction; for ``t_in > 1`` the oldest frame of the input window is
    dropped and the new frame appended.

    Args:
        inputs (Tensor): model input for the first step.
        model (nn.Cell): backbone producing one frame per call.
        feature_dims (int): number of predicted channels per frame.
        mask_dims (int): number of mask channels per frame.
        t_out (int): number of autoregressive steps to run.
        t_in (int): number of frames in the input window.

    Returns:
        list[Tensor]: the ``t_out`` raw predictions, oldest first.
    """
    total_dims = feature_dims + mask_dims
    pred_list = []
    for _ in range(t_out):
        pred = ops.cast(model(inputs), inputs.dtype)
        inputs = inputs.squeeze()
        pred_list.append(pred)
        if t_out > 1:
            # Re-attach the (unchanged) mask channels to the new prediction.
            pred_with_mask = ops.cat((pred, inputs[..., feature_dims:total_dims]), axis=-1)
            if t_in == 1:
                inputs = pred_with_mask
            else:
                # Slide the window: drop the oldest frame, append the newest,
                # and flatten back to (batch, total_dims * t_in).
                inputs = ops.cat(
                    (inputs[..., total_dims:total_dims * t_in], pred_with_mask),
                    axis=-1).reshape(-1, total_dims * t_in)
    return pred_list


class InferenceModule(WeatherForecast):
    """
    Perform multiple rounds of model inference.

    Runs the backbone autoregressively for ``t_out_test`` steps and reports a
    binary sea-ice classification accuracy, thresholding concentration at 0.15.
    """

    def __init__(self, model, config, logger):
        super(InferenceModule, self).__init__(model, config, logger)
        self.model = model
        data_params = config.get('data')
        self.config = config
        self.feature_dims = data_params.get('feature_dims')  # predicted channels per frame
        self.t_out_test = data_params.get('t_out_test')      # rollout length at eval time
        self.logger = logger
        self.batch_size = data_params.get('batch_size')
        self.t_in = data_params.get('t_in')                  # input window length
        self.mask_dims = data_params.get('mask_dims')        # static mask channels per frame
        self.plt_key_info = config.get('summary').get('plt_key_info')

    def _get_metrics(self, inputs, labels, data_length=0):
        """Compute binary ice/no-ice accuracy for one batch (assumes batch_size == 1).

        Args:
            inputs (Tensor): model input window.
            labels (Tensor): ground-truth frames; channel 0 is sea-ice concentration.
            data_length (int): running sample index, used only to name the plot file.

        Returns:
            float: accuracy of the thresholded prediction against the thresholded label.
        """
        pred = self.forecast(inputs)
        pred = ops.stack(pred, 0)
        pred = ops.squeeze(pred).asnumpy()
        sic_pred = pred[:, 0]
        # 0.15 concentration is the ice/open-water decision threshold.
        sic_pred_categori = np.where(sic_pred > 0.15, 1, 0)
        labels = labels.asnumpy()
        sic_label = labels[0, ..., 0]  # batch_size=1

        if self.plt_key_info:
            file_name = f"SeaIceConcentration_{str(data_length)}.png"
            # Clip predictions into the valid concentration range [0, 1] for
            # plotting. np.clip returns a copy, so sic_pred stays untouched
            # (the original in-place clipping mutated it through an alias).
            cur_sic_pred = np.clip(sic_pred, 0.0, 1.0)
            plt_metrics(sic_label, cur_sic_pred,
                        fig_name=os.path.join(self.config.get('summary').get('summary_dir'), file_name))

        sic_label_categori = np.where(sic_label > 0.15, 1, 0)
        true_num = np.sum(sic_pred_categori == sic_label_categori)
        # NOTE(review): this divides by the leading dimension only, not the total
        # pixel count (sic_pred_categori.size) — confirm this is intentional.
        acc = true_num / sic_pred_categori.shape[0]

        return acc

    def forecast(self, inputs):
        """Return the list of ``t_out_test`` autoregressive predictions."""
        pred_list = _forecast_multi_step(inputs, self.model, self.feature_dims,
                                         self.mask_dims, self.t_out_test, self.t_in)
        return pred_list

    def eval(self, dataset):
        '''
        Eval the model using test dataset or validation dataset.

        Args:
            dataset (mindspore.dataset): The dataset for eval, including inputs and labels.

        Returns:
            float: mean accuracy over the dataset.
        '''
        self.logger.info("================================Start Evaluation================================")
        data_length = 0
        acc = 0
        for data in dataset.create_dict_iterator():
            inputs = data['inputs']
            batch_size = inputs.shape[0]
            labels = data['labels']
            acc += self._get_metrics(inputs, labels, data_length=data_length)
            data_length += batch_size

        self.logger.info(f'test dataset size: {data_length}')

        acc = acc / data_length
        self.logger.info(f"Current eval accuracy is: {(acc*100):.2f}%")
        self.logger.info("================================End Evaluation================================")
        return acc


class LossNet(nn.Cell):
    """Mean-squared-error loss, optionally averaged over masked (valid) cells only.

    Args:
        feature_dims (int): number of predicted channels; part of the masked-loss
            normalization factor.
        mask (numpy.ndarray, optional): binary mask of valid cells (1 = valid).
        t_in (int): input window length; part of the masked-loss normalization.
        is_mask (bool): if True, sum the masked squared error and normalize by
            ``t_in * feature_dims * ice_num``; otherwise take the plain mean.
            Requires ``mask`` to be provided.
    """

    def __init__(self, feature_dims, mask=None, t_in=1, is_mask=False):
        super().__init__()
        self.feature_dims = feature_dims
        self.is_mask = is_mask
        self.t_in = t_in
        self.mask = mask
        if mask is not None:
            # Count of valid cells, precomputed for the normalization factor.
            self.ice_num = np.sum(mask)
            # Convert the mask to a Tensor once at build time instead of on
            # every forward pass (the original rebuilt it in construct()).
            self.mask_tensor = Tensor(mask)

    def construct(self, label, pred):
        """Return the (optionally masked) MSE between ``pred`` and ``label``."""
        pred = ops.cast(pred, mstype.float32)
        pred = ops.squeeze(pred)
        err = msnp.square(pred - label)
        if self.is_mask:
            # Zero out invalid cells, then normalize by the valid-cell count.
            err_mask = err * self.mask_tensor
            loss = msnp.sum(err_mask) / (self.t_in * self.feature_dims * self.ice_num)
        else:
            loss = msnp.average(err)
        return loss


class EvaluateCallBack(Callback):
    """
    Monitor the prediction accuracy in training.

    Every ``eval_interval`` epochs, runs the wrapped :class:`InferenceModule`
    over the validation dataset.  Used as a context manager so the summary
    record is opened and closed around training.
    """

    def __init__(self,
                 model,
                 valid_dataset,
                 config,
                 logger,
                 ):
        super(EvaluateCallBack, self).__init__()
        self.config = config
        summary_cfg = config.get('summary')
        self.summary_dir = summary_cfg.get('summary_dir')
        self.predict_interval = summary_cfg.get('eval_interval')
        self.logger = logger
        self.valid_dataset = valid_dataset
        self.eval_net = InferenceModule(model, config, logger=self.logger)
        self.eval_time = 0

    def __enter__(self):
        # Open the summary record for the lifetime of the training run.
        self.summary_record = SummaryRecord(self.summary_dir)
        return self

    def __exit__(self, *exc_args):
        self.summary_record.close()

    def epoch_end(self, run_context):
        """
        Evaluate the model at the end of epoch.

        Args:
            run_context (RunContext): Context of the train running.
        """
        cb_params = run_context.original_args()
        if cb_params.cur_epoch_num % self.predict_interval != 0:
            return
        self.eval_time += 1
        self.eval_net.eval(self.valid_dataset)


class CustomWithLossCell(nn.Cell):
    """
    custom loss

    Wraps the backbone with a multi-step rollout: the model is unrolled for
    ``t_out_train`` steps and the per-step losses are averaged.
    """

    def __init__(self, backbone, loss_fn, data_params):
        super(CustomWithLossCell, self).__init__(auto_prefix=False)
        self._backbone = backbone
        self._loss_fn = loss_fn

        self.feature_dims = data_params.get('feature_dims')
        self.mask_dims = data_params.get('mask_dims')
        self.t_out_train = data_params.get('t_out_train')
        self.t_in = data_params.get('t_in')

    def construct(self, data, labels):
        """Custom loss forward function"""
        preds = _forecast_multi_step(
            data, self._backbone, self.feature_dims, self.mask_dims,
            self.t_out_train, self.t_in)
        total = 0
        for step, pred in enumerate(preds):
            # With a single output step, labels carry no time axis to index.
            if self.t_out_train == 1:
                step_label = ops.squeeze(labels)
            else:
                step_label = ops.squeeze(labels[:, step])
            total += self._loss_fn(step_label, pred)
        loss = total / self.t_out_train
        return loss
