import re
from math import ceil
from typing import Dict
from mindnlp.engine import Trainer, Evaluator
from mindspore import nn, Model
from mindnlp.metrics import Accuracy
from callback import LogCallback
import mindspore.dataset as ds
from mindspore.nn.learning_rate_schedule import WarmUpLR
from mindnlp.transformers import PreTrainedModel, BertForMaskedLM
from copy import deepcopy
from mindspore import save_checkpoint
import wandb
import pickle
from mindnlp.engine.callbacks import BestModelCallback
import numpy as np
import mindspore as ms

class Runner:
    """Orchestrate training, evaluation, and interactive inference for a
    (meta-)model in an offsite-tuning / module-incubation pipeline.

    Phases (``args.phase``) observed in this class:
        DT  -- distill the meta model, then save ``distilled-meta-model``.
        FT  -- fine-tune the meta model, then save ``finetuned-meta-model``.
        ICB -- incubate one client module (selected by ``idx``), then save
               ``incub-meta-model-{idx}``.
        OT  -- offsite-tuning evaluation: copy trained meta layers back
               into the base model and evaluate it.
    """

    def __init__(self, args, model: "PreTrainedModel", dataset: Dict,
                 base_model: "PreTrainedModel" = None, idx=None,
                 server_model=None, tokenizer=None) -> None:
        """Store the run configuration.

        Args:
            args: parsed CLI/config namespace; must provide ``lr``,
                ``weight_decay``, ``epoch``, ``model``, ``phase``,
                ``divided_depths`` and ``meta_divided_depths``.
            model: the network to train/evaluate.
            dataset: dict with ``'train'`` and ``'val'`` splits
                (``'val'`` may be ``None``).
            base_model: full-depth base model (used in the OT phase).
            idx: client/module index (used in the ICB phase).
            server_model: server-side model for ``do_infer``.
            tokenizer: tokenizer for ``do_infer``.
        """
        self.args = args
        self.model = model
        self.dataset = dataset
        self.base_model = base_model
        self.server_model = server_model
        self.tokenizer = tokenizer
        # Per-module layer counts: base model vs. compressed meta model.
        self.divided_depths = args.divided_depths
        self.meta_divided_depths = args.meta_divided_depths
        self.idx = idx
        self.metric = Accuracy()

    def _save_model_and_config(self, ckpt_name, config_name, meta_divided_depths):
        """Save the current model checkpoint and pickle the depth config.

        Args:
            ckpt_name: checkpoint file name (written under ``./results``).
            config_name: pickle file name for the config dict.
            meta_divided_depths: the meta depths to record (the ICB phase
                records a modified copy, see ``do_train``).
        """
        save_path = "./results"
        save_checkpoint(self.model, f"{save_path}/{ckpt_name}")
        config_dicts = {
            'divided_depths': self.divided_depths,
            'meta_divided_depths': meta_divided_depths
        }
        with open(f"{save_path}/{config_name}", 'wb') as f:
            pickle.dump(config_dicts, f)

    def do_train(self):
        """Train the model, then persist a phase-specific checkpoint/config."""
        optimizer = nn.AdamWeightDecay(
            self.model.trainable_params(),
            learning_rate=self.args.lr,
            weight_decay=self.args.weight_decay,
        )
        # The network computes its loss internally, so no external loss_fn.
        self.loss_fn = None
        callbacks = [
            LogCallback(args=self.args, idx=self.idx),
        ]
        if self.dataset['val'] is not None:
            callbacks.append(BestModelCallback(
                save_path="./results",
                auto_load=True,
                ckpt_name=f"{self.args.model}-{self.args.phase}-best-model",
            ))
        trainer = Trainer(
            network=self.model,
            train_dataset=self.dataset['train'],
            eval_dataset=self.dataset['val'],
            optimizer=optimizer,
            metrics=self.metric if self.dataset['val'] else None,
            loss_fn=self.loss_fn,
            epochs=self.args.epoch,
            callbacks=callbacks,
        )

        trainer.train(target_columns='labels')
        # FIX: the original used `if DT: ... if FT: ... elif ICB: ...`,
        # which only worked because the phases are mutually exclusive.
        # A single if/elif chain states the intent explicitly.
        if self.args.phase == 'DT':
            self._save_model_and_config(
                "distilled-meta-model.ckpt",
                "distilled-meta-model-config.pickle",
                self.meta_divided_depths,
            )
        elif self.args.phase == 'FT':
            self._save_model_and_config(
                "finetuned-meta-model.ckpt",
                "finetuned-meta-model-config.pickle",
                self.meta_divided_depths,
            )
        elif self.args.phase == 'ICB':
            # The incubated module keeps its full base depth in the config.
            meta_divided_depths = deepcopy(self.meta_divided_depths)
            meta_divided_depths[self.idx] = self.divided_depths[self.idx]
            self._save_model_and_config(
                f"incub-meta-model-{self.idx}.ckpt",
                f"incub-meta-model-config-{self.idx}.pickle",
                meta_divided_depths,
            )

    def _evaluate_final(self, network):
        """Evaluate ``network`` on the validation split and log to wandb."""
        callbacks = [
            LogCallback(args=self.args, prefix='final', is_eval=True),
        ]
        evaluator = Evaluator(
            network=network,
            eval_dataset=self.dataset['val'],
            metrics=self.metric,
            callbacks=callbacks
        )
        # NOTE(review): relies on the private Evaluator._run API. Original
        # (translated) comment: "deep-copying base_model when initializing
        # meta_model avoids the error here".
        _, metric_name, metric_value = evaluator._run(tgt_columns="labels")
        wandb.summary[f"final_{metric_name[0].lower()}"] = metric_value[0]

    def do_test(self):
        """Run final evaluation for the OT or ICB phase."""
        if self.args.phase == "OT":
            print("Evaluating offsite tuning...")  # typo "Evluating" fixed
            # Only load parameters of the last module back into the base model.
            base_model_layers = sum(self.divided_depths)
            load_layers = list(range(base_model_layers - self.meta_divided_depths[-1], base_model_layers))
            print("need loaded layers in base model: ", load_layers)
            layer_base2meta = self.get_layer_base2meta()
            base_model_dict_parameters = self.base_model.parameters_dict()
            # Deep copy so set_data below cannot alias meta-model tensors.
            meta_model_dict_parameters = deepcopy(self.model.parameters_dict())
            print("layer_base2meta: ", layer_base2meta)
            load_parameters = []
            for name, param in self.base_model.parameters_and_names():
                if "roberta.encoder.layer" in name:
                    # Assumes the layer index is the only 1-2 digit number in
                    # the parameter name (roberta.encoder.layer.<i>.<...>);
                    # re.split would raise on additional numbers.
                    prefix, suffix = re.split(r'\d{1,2}', name)
                    layer_index = int(re.findall(r'\d{1,2}', name)[0])
                    if layer_index in load_layers:
                        # Rewrite the base-layer index to its meta counterpart.
                        key = f'{prefix}{layer_base2meta[layer_index]}{suffix}'
                        base_model_dict_parameters[name].set_data(meta_model_dict_parameters[key])
                        load_parameters.append(key)
                elif "classifier" in name:
                    base_model_dict_parameters[name].set_data(meta_model_dict_parameters[name])
                    load_parameters.append(name)
            print("load_parameters: ", load_parameters)
            self._evaluate_final(self.base_model)
        elif self.args.phase == "ICB":
            self._evaluate_final(self.model)

    def do_infer(self):
        """Interactively classify sentence pairs (equivalent / not)."""
        while True:
            sentence1 = input("sentence1: ")
            sentence2 = input("sentence2: ")
            input_text = [sentence1, sentence2]
            tokenized_input = self.tokenizer(*input_text)
            for key in tokenized_input.keys():
                # Add a batch dimension, then convert to a MindSpore tensor.
                tokenized_input[key] = ms.Tensor(np.array([tokenized_input[key], ]))
            # NOTE(review): client_output is computed but never consumed —
            # presumably the server model should receive the client's hidden
            # states rather than the raw tokens; confirm intended data flow.
            client_output = self.model(**tokenized_input)

            # Get server output and decode the predicted class.
            server_output = self.server_model(**tokenized_input)
            id2results = {
                0: "not_equivalent",
                1: "equivalent"
            }
            result_id = server_output.logits.max(axis=-1, return_indices=True)[1]
            print(id2results[int(result_id)])

    def get_layer_base2meta(self):
        """Map base-model layer indices to meta-model layer indices.

        Within each module ``i``, ``divided_depths[i]`` base layers are
        compressed into ``meta_divided_depths[i]`` meta layers: base layers
        are picked at stride ``ceil(base/meta)``, and when the depths do not
        divide evenly, ``flag`` initial picks advance by 1 instead of the
        full stride so the selection still fits inside the module.

        Returns:
            dict mapping each selected base layer index to its meta layer
            index (global indices across all modules).
        """
        meta_divided_depths = self.meta_divided_depths
        origin_divided_depth = self.divided_depths
        target2meta = {}
        for i in range(len(meta_divided_depths)):
            # Uniform sampling stride within module i.
            step = ceil(origin_divided_depth[i] / meta_divided_depths[i])
            # Global starting offsets of module i in meta and base models.
            meta_layer_index = sum(meta_divided_depths[:i])
            target_layer_index = sum(origin_divided_depth[:i])
            # Number of leading picks that advance by 1 instead of `step`
            # (compensation when base depth is not a multiple of meta depth).
            flag = meta_divided_depths[i] - origin_divided_depth[i] % meta_divided_depths[i] - 1
            if step * (meta_divided_depths[i] - 1) < origin_divided_depth[i]:
                # Full stride already fits inside the module: no compensation.
                flag = 0
            for j in range(meta_divided_depths[i]):
                target2meta[target_layer_index] = j + meta_layer_index
                if flag != 0:
                    target_layer_index += 1
                    flag -= 1
                else:
                    target_layer_index += step
        return target2meta