import datetime
import os
import time
from pathlib import Path
import argparse
from omegaconf import OmegaConf
from mindspore import load_checkpoint
from mindspore.nn import DynamicLossScaleUpdateCell, AdamWeightDecay
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
from mindspore.train.model import Model

from models import build_model

from data_utils import build_dataset

# from misc_utils.wrapper import MFTrainOneStepCell
from mindformers.wrapper import MFTrainOneStepCell
from misc_utils.lr_sched import LearningRate
from misc_utils.logger import create_logger, LossMonitor
import misc_utils.misc as misc
import misc_utils.lr_decay as lr_decay
from constants import LOG_ROOT


def main(args):
    """Run one training job described by *args* (a merged OmegaConf config).

    Initializes the (possibly distributed) context, builds the model,
    dataset, LR schedule, optimizer and loss-scaled train-step wrapper,
    optionally resumes from a checkpoint, then trains with callbacks for
    loss logging and (rank 0 only) checkpointing.
    """
    args.rank, args.world_size = misc.init_context(args)
    logger = create_logger(args)
    logger.info(f'job dir: {args.output_dir}')

    model = build_model(args)

    # Count only trainable parameters (requires_grad filters frozen ones).
    num_params = sum(param.size for param in model.get_parameters() if param.requires_grad)
    logger.info(f'Number of Trainable Parameters: {num_params}')
    dataset_train = build_dataset(args)
    per_step_size = dataset_train.get_dataset_size()

    # Reuse per_step_size instead of querying the dataset size a second time.
    lr_schedule = LearningRate(
        args.lr, args.min_lr,
        args.epochs, args.warmup_epochs, per_step_size
    )
    # following timm: set wd as 0 for bias and norm layers
    param_groups = lr_decay.add_weight_decay(model, args.weight_decay)
    optimizer = AdamWeightDecay(param_groups,
                                learning_rate=lr_schedule,
                                beta1=args.beta1,
                                beta2=args.beta2)

    wrapper = MFTrainOneStepCell(
        model,
        optimizer,
        use_clip_grad=args.clip_grad,
        max_grad_norm=args.max_norm,
        # Dynamic loss scaling for mixed-precision numerical stability.
        scale_sense=DynamicLossScaleUpdateCell(loss_scale_value=2.0 ** 16, scale_factor=2, scale_window=1000)
    )
    if args.resume_from:
        # Loading into the wrapper restores model + optimizer state together.
        load_checkpoint(args.resume_from, net=wrapper)
        logger.info(f'Load ckpt from {args.resume_from}')
    trainer = Model(wrapper)

    # build callbacks
    callbacks = [LossMonitor(args=args, logger=logger.info)]
    if args.rank == 0:
        # Only rank 0 writes checkpoints. save_freq is multiplied by the
        # steps-per-epoch, so it is presumably expressed in epochs — verify
        # against the config's documentation.
        save_ckpt_freq = int(args.save_freq * per_step_size)
        config_ck = CheckpointConfig(save_checkpoint_steps=save_ckpt_freq,
                                     keep_checkpoint_max=100000000,  # effectively "keep all"
                                     integrated_save=False)
        ckpoint_cb = ModelCheckpoint(directory=args.output_dir,
                                     config=config_ck)
        callbacks.append(ckpoint_cb)

    logger.info(f'Experiment type: {args.exp_type}')
    logger.info(f"Start training for epoch [{args.start_epoch}, {args.epochs})")
    logger.info(f'Num steps: {per_step_size}')
    logger.info(f"num_prefix_tokens: {args.num_prefix_tokens}")
    start_time = time.time()

    trainer.train(args.epochs, dataset_train, callbacks=callbacks, dataset_sink_mode=True, sink_size=args.sink_size)

    total_time = time.time() - start_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    logger.info('Training time {}'.format(total_time_str))


def create_parser():
    """Construct the CLI argument parser for this training script.

    Besides the named options, all trailing tokens are collected into the
    positional ``opts`` list (later forwarded to OmegaConf as overrides).
    """
    parser = argparse.ArgumentParser('Med Foundation Models')
    parser.add_argument("--config", default="", metavar="FILE", help="path to config file")
    parser.add_argument('--output_dir', default=LOG_ROOT)
    parser.add_argument('--exp_type', type=str, default='mae')
    parser.add_argument('--exp_name', type=str, default='debug')
    # Capture everything after the recognized flags verbatim.
    parser.add_argument("opts", nargs=argparse.REMAINDER, default=None)
    return parser


if __name__ == '__main__':
    args = create_parser().parse_args()
    # Prefix the experiment name with its type, e.g. "mae_debug".
    args.exp_name = f'{args.exp_type}_{args.exp_name}'
    print('Output Dir:', args.output_dir)
    Path(args.output_dir).mkdir(parents=True, exist_ok=True)

    # Forward the resolved output dir and experiment type into the config
    # as CLI-style dotlist overrides, appended after user-supplied opts.
    overrides = args.opts + [f"output_dir={args.output_dir}", f"exp_type={args.exp_type}"]
    default_cfg = OmegaConf.load('configs/default.yaml')
    cfg = OmegaConf.merge(
        default_cfg,
        OmegaConf.load(args.config),
        OmegaConf.from_cli(overrides),
    )
    # Persist the fully-merged config alongside the run's outputs.
    with open(os.path.join(args.output_dir, 'config.yaml'), "w") as f:
        OmegaConf.save(config=cfg, f=f)

    print(f"cfg = {cfg}")
    main(cfg)
