# Copyright 2023 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""GraphCastTrainer"""
from mindspore import Model, nn
from mindspore.train.loss_scale_manager import DynamicLossScaleManager, FixedLossScaleManager

from .pretrain import Trainer

from .callback import EvaluateCallBack
from .net_with_clip import TrainOneStepCell

class GraphCastTrainer(Trainer):
    r"""
    Self-defined forecast model trainer inherited from `Trainer`.

    Args:
        config (dict): parameters for training.
        model (Cell): network for training.
        loss_fn (Cell): user-defined loss network wrapping the model.
        logger (logging.RootLogger): tools for logging.

    Supported Platforms:
        ``Ascend`` ``GPU``

    """
    def __init__(self, config, model, loss_fn, logger):
        super().__init__(config, model, loss_fn, logger, weather_data_source="ECMWFIFS")
        # Bind config before building the callback and solver: both
        # get_callback() and get_solver() read self.config. Previously this
        # assignment came last and only worked because the parent __init__
        # happened to set self.config first.
        self.config = config
        self.train_dataset, self.valid_dataset = self.get_dataset()
        self.pred_cb = self.get_callback()
        self.solver = self.get_solver()

    def get_solver(self):
        """
        Build the MindSpore ``Model`` used for training.

        Returns:
            Model: wraps the loss network and optimizer with a fixed loss
            scale taken from ``config['train']['loss_scale']`` and the AMP
            level from ``train_params`` (default ``'O2'``).
        """
        loss_scale = FixedLossScaleManager(loss_scale=self.config.get('train').get('loss_scale'))
        solver = Model(network=self.loss_fn,
                       optimizer=self.optimizer,
                       loss_scale_manager=loss_scale,
                       amp_level=self.train_params.get('amp_level', 'O2'),
                       )
        return solver

    def get_callback(self):
        """
        Build the evaluation callback run during training.

        Returns:
            EvaluateCallBack: evaluates the model on the validation dataset
            and reports results through the logger.
        """
        pred_cb = EvaluateCallBack(self.model, self.valid_dataset, self.config, self.logger)
        return pred_cb