#!/usr/bin/python3
# -*- coding: utf-8 -*-
# File  : model.py
# Author: anyongjin
# Date  : 2020/8/14
import os
import sys
import abc
import traceback
import numpy as np
from DistillBert.utils import get_config, logger, default_cfg_path, src_dir
from DistillBert.data import DataLoader
from DistillBert.dstoken import Tokenizer
from DistillBert.kernel import *
from transformers import *
import tensorflow as tf
import shutil
cur_module = sys.modules[__name__]


def get_build_origin_teach_data(stu_hidden_size):
    """Build a closure that adapts teacher hidden states to the student width.

    The returned callable maps ``(x, y, t_outs)`` to ``(x, teacher_hidden)``.
    When the teacher's last dimension differs from ``stu_hidden_size`` the
    hidden states are mean-pooled down to the student width.

    :param stu_hidden_size: hidden size expected by the student model
    :return: a ``build_origin(x, y, t_outs)`` callable
    """
    def build_origin(x, y, t_outs):
        teacher_out = t_outs[0].numpy()
        if teacher_out.shape[-1] != stu_hidden_size:
            # Collapse to 2-D, pool column groups, then restore batch/seq dims.
            shape_before = teacher_out.shape
            flat = teacher_out.reshape((-1, shape_before[-1]))
            flat = pooling(flat, [1, int(shape_before[-1] / stu_hidden_size)], 'mean')
            teacher_out = flat.reshape(shape_before[:2] + (-1,))
        return x, teacher_out
    return build_origin


def get_build_sentiment_analysis_teach_data(for_dense=True):
    """Factory for a sentiment-analysis teacher-data builder.

    :param for_dense: when True the builder yields the teacher's pooled
        output; otherwise it yields the teacher logits, with every row the
        teacher got wrong rewritten (mean value everywhere, max value on
        the gold label) so the student is not taught the teacher's mistakes.
    :return: a ``(x, y, t_outs) -> (x, target)`` callable
    """
    def build_sentiment_analysis_teach_data(x, y, t_outs):
        logits = t_outs[0].numpy()
        pooled_output = t_outs[1].numpy()
        if for_dense:
            return x, pooled_output
        assert len(logits.shape) == 2, 'output should be shape:[batch_size, num_labels], maybe use the wrong model_cls?'
        # non-zero entries mark rows where the teacher disagrees with the gold label
        errs = logits.argmax(axis=-1) - y
        err_idxs = np.flatnonzero(errs)
        if len(err_idxs) > len(errs) * 0.7:
            logger.error(f'teacher errors count:{len(err_idxs)}, total:{len(errs)} over 70%')
        mean_val = logits.mean()
        max_val = logits.max()
        # flatten wrong rows to the global mean, then spike the gold label to the max
        logits[err_idxs, :] = mean_val
        for eid in err_idxs:
            logits[eid, y[eid]] = max_val
        return x, logits
    return build_sentiment_analysis_teach_data


def build_ner_teach_data(x, y, t_outs):
    """Pass the teacher's NER logits through unchanged as the distillation target."""
    teacher_logits = t_outs[0].numpy()
    return x, teacher_logits


class BaseDistilModel:
    """Common base for teacher/student models in BERT distillation.

    Handles configuration loading, tokenizer/model restore, checkpoint
    saving, task-specific compilation and the keras training loop.
    Subclasses implement :meth:`load_model` and :meth:`save_model`.
    """

    def __init__(self, is_teacher=True, in_pretrain=False, model_dir=None,
                 distill_from_teacher=True):
        '''
        initialize a base model
        :param is_teacher: True for the (large) teacher model, False for the student
        :param in_pretrain: True while in the pretrain phase (uses gen_config.yml)
        :param model_dir: output dir override; defaults to <output_dir>/<model_type>
        :param distill_from_teacher: whether to train from teacher output
        '''
        self.distill_from_teacher = distill_from_teacher
        cfg_name = 'gen_config.yml' if in_pretrain else 'task_config.yml'
        self.config = get_config(os.path.join(src_dir, cfg_name))
        self.is_teacher = is_teacher
        self.in_pretrain = in_pretrain
        if not in_pretrain:
            # task fine-tuning: task_type is only defined outside pretrain
            self.task_type = self.config['task_type']
            self.model_type = 'teacher' if is_teacher else 'student'
            self.base_model_dir = os.path.join(self.config['output_dir'], 'light_bert')
        else:
            self.model_type = 'bert' if is_teacher else 'light_bert'
        self.out_model_dir = model_dir or os.path.join(self.config['output_dir'], self.model_type)
        self.tokenizer = None
        self.model = None
        self.seq_len = None
        self.hidden_size = self.config['hidden_size']
        self.last_layer = None
        self.model_config = None
        self.tsfm_model = self.config['pretrained_model']
        self.train_args = {}
        self.my_cb: MyCallBack = None
        self.last_save_step = None  # last step number to save model
        self.train_args.update(self.config['train'])
        # student-specific training overrides take precedence over shared ones
        if not self.is_teacher and 'distill_train' in self.config:
            self.train_args.update(self.config['distill_train'])
        self.train_args.update({
            'output_dir': self.out_model_dir,
            'logging_dir': self.out_model_dir + '_log'
        })
        if not self.in_pretrain:
            from transformers.tokenization_bert import load_vocab
            label_vocab_path = os.path.join(self.config['data_dir'], self.config['task']['label_vocab'])
            self.label_vocab = load_vocab(label_vocab_path)
            self.label_vocab_size = len(self.label_vocab)

    @abc.abstractmethod
    def load_model(self, model_loader, restore_dir: str, step: int=None):
        """Restore the concrete model; `step` selects among multiple checkpoints."""
        pass

    @abc.abstractmethod
    def save_model(self, model_dir_or_path, **kwargs):
        """Persist the concrete model to a directory or .h5 path."""
        pass

    def get_valid_dir(self, first_dir, default_dir=None):
        """Return `first_dir` when it exists and is non-empty, else `default_dir`."""
        restore_dir = default_dir
        if os.path.isdir(first_dir) and os.listdir(first_dir):
            restore_dir = first_dir
        return restore_dir

    def load(self, model_dir, step: int=None):
        '''
        load model from dir. if multi model is saved, use `step` to specify a model
        :param model_dir:
        :param step: int, used to choose a step model
        :return:
        '''
        restore_dir = self.get_valid_dir(model_dir, self.tsfm_model)
        tokenizer_loader = None
        model_loader = TFAutoModel
        if 'tokenizer_cls' in self.config and self.config['tokenizer_cls']:
            tokenizer_loader = self.config['tokenizer_cls']
        if 'model_cls' in self.config and self.config['model_cls']:
            # resolve the configured class name against this module's namespace
            model_cls_name: str = self.config['model_cls']
            model_loader = getattr(cur_module, model_cls_name)
        token_map = None
        if 'token_map' in self.config and self.config['token_map']:
            token_map = self.config['token_map']
        self.seq_len = self.config['max_len'] if 'max_len' in self.config else None
        self.tokenizer = Tokenizer(restore_dir, tokenizer_loader,
                                   token_map_list=token_map, max_len=self.seq_len)
        kwargs = {}
        if self.is_teacher and 'teacher_cfg' in self.config:
            kwargs.update(self.config['teacher_cfg'])
        if not self.in_pretrain:
            kwargs['num_labels'] = self.label_vocab_size
        self.model_config = AutoConfig.from_pretrained(self.tsfm_model, **kwargs)
        self.load_model(model_loader, model_dir, step=step)

    def save(self, model_dir_or_path=None, save_format='h5', include_optimizer=False):
        """Save model + tokenizer + task config; records the save step on the callback."""
        if self.in_pretrain and self.model_type == 'bert':
            logger.warning('teacher is in pretrain mode, skip saving model...')
            return
        if model_dir_or_path is None:
            model_dir_or_path = self.out_model_dir
        model_dir = model_dir_or_path
        if model_dir_or_path.endswith('.h5'):
            model_dir = os.path.dirname(model_dir_or_path)
        elif not os.path.isdir(model_dir_or_path):
            # makedirs (not mkdir): the output path may be nested and not yet exist
            os.makedirs(model_dir_or_path, exist_ok=True)
        self.save_model(model_dir_or_path, save_format=save_format, include_optimizer=include_optimizer)
        self.tokenizer.save_pretrained(model_dir)
        shutil.copy(default_cfg_path, os.path.join(model_dir, 'task_config.yml'))
        if self.my_cb:
            self.last_save_step = self.my_cb.train_step

    def compile_for_sentiment_analysis(self):
        """Compile for classification: Adam + sparse CE (from logits) + accuracy."""
        # tf is imported at module level
        optimizer = tf.keras.optimizers.Adam(learning_rate=self.train_args['learning_rate'])
        loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
        metric = tf.keras.metrics.SparseCategoricalAccuracy('acc')
        self.model.compile(optimizer=optimizer, loss=loss, metrics=[metric])

    def compile(self):
        """Dispatch to `compile_for_<task_type>`; no-op when already compiled."""
        if hasattr(self.model, 'optimizer') and self.model.optimizer:
            logger.warning(f'model {self.model_type} is already been compiled!')
            return
        task_compile_name = 'compile_for_' + self.config['task_type'].replace('-', '_')
        if not hasattr(self, task_compile_name):
            raise Exception(f'please implement member function:{task_compile_name}')
        getattr(self, task_compile_name)()

    def get_callbacks(self, train_loader: DataLoader, save_by_loss=False,
                      save_on_best_acc=False, stop_on_eof=True, end_epoch=-1):
        """Build [MyCallBack, LearningRateScheduler] with warmup + exponential decay."""
        my_cb = MyCallBack(self, data_loader=train_loader, save_by_avg_loss=save_by_loss,
                           stop_on_eof=stop_on_eof, save_on_best_acc=save_on_best_acc, end_epoch=end_epoch)
        from tensorflow.keras.callbacks import LearningRateScheduler

        init_lr = self.train_args['learning_rate']
        warmup_num = 0 if 'warmup_steps' not in self.train_args else self.train_args['warmup_steps']
        min_lr = self.train_args['min_lr']
        decay_base = float(self.train_args['decay_base'])
        decay_every = int(self.train_args['decay_every'])

        def lr_update(epoch, lr):
            # guard warmup_num > 0: without it, epoch 0 with no warmup configured
            # hits `0 <= 0` and divides by zero below
            if warmup_num > 0 and epoch <= warmup_num:
                # linear warmup from 0 to init_lr
                lr = init_lr * epoch / warmup_num
            elif lr > min_lr:
                # exponential decay after warmup, floored by min_lr
                lr = init_lr * decay_base ** ((epoch - warmup_num) / decay_every)
            if epoch % 10 == 0:
                logger.warning(f'lr:{lr:.10f}')
            return lr
        lr_cb = LearningRateScheduler(lr_update)
        return [my_cb, lr_cb]

    def fit(self, train_loader: DataLoader, dev_loader: DataLoader):
        """Train with one step per keras 'epoch' so callbacks fire every step."""
        self.compile()
        train_data = train_loader.build()
        dev_data = dev_loader.get_data_cols()
        # steps_per_epoch=1 + epochs=max_steps: each keras epoch is one train step
        self.model.fit(x=train_data, validation_data=dev_data, steps_per_epoch=1,
                       epochs=self.train_args['max_steps'],
                       validation_freq=self.train_args['eval_every_n_steps'],
                       initial_epoch=self.train_args['initial_epoch'],
                       callbacks=self.get_callbacks(train_loader, save_by_loss=False,
                                                    save_on_best_acc=True))

    def test(self, dev_loader: DataLoader):
        """Evaluate on the dev set; returns keras metrics as a dict."""
        self.compile()
        dev_data = dev_loader.get_data_cols()
        x, y = dev_data[:2]
        return self.model.test_on_batch(x, y, return_dict=True)


class TeacherModel(BaseDistilModel):
    """Teacher-side model wrapper used as the distillation source."""

    def __init__(self, in_pretrain=False, model_dir=None):
        # a TeacherModel is always is_teacher=True
        super().__init__(True, in_pretrain, model_dir=model_dir)

    def load_model(self, model_loader, model_dir, step: int=None):
        """Restore the teacher network, falling back to the pretrained checkpoint."""
        if self.in_pretrain:
            restore_dir = self.tsfm_model
        else:
            restore_dir = self.get_valid_dir(model_dir, self.tsfm_model)
        if self.task_type != 'sentiment-analysis':
            raise Exception(f'unsupport task type:{self.task_type}')
        from DistillBert.teacher import TFAlbertForSeqClassify
        self.model = TFAlbertForSeqClassify.from_pretrained(restore_dir, config=self.model_config)

    def save_model(self, model_dir_or_path, **kwargs):
        """Save in TF SavedModel format when requested, else transformers format."""
        use_tf_format = bool(kwargs) and kwargs.get('save_format', None) == 'tf'
        if use_tf_format:
            self.model.save(model_dir_or_path, **kwargs)
        else:
            self.model.save_pretrained(model_dir_or_path)



class MyCallBack(tf.keras.callbacks.Callback):
    '''
    Keras callback driving checkpoint saving and early stopping for distillation.

    view :https://www.tensorflow.org/guide/keras/custom_callback
    '''
    def __init__(self, wrapper: BaseDistilModel, data_loader: DataLoader=None, save_on_best_acc=False,
                 save_by_avg_loss=False, stop_on_eof=False, call_backs=None, end_epoch=-1):
        '''
        :param wrapper: the model wrapper to save/stop; registers self on it
        :param data_loader: training loader; required when stop_on_eof is set
        :param save_on_best_acc: save whenever validation accuracy improves
        :param save_by_avg_loss: keep the max_save_num checkpoints with lowest avg loss
        :param stop_on_eof: stop once the loader reaches train_args['num_epochs']
        :param call_backs: optional hooks: {'epoch_end': fn, 'test_end': fn}
        :param end_epoch: hard stop at this epoch; <= 0 disables
        '''
        super(MyCallBack, self).__init__()
        self.wrapper = wrapper
        self.wrapper.my_cb = self  # lets the wrapper record the last saved step
        self.save_on_best_acc = save_on_best_acc
        self.save_by_avg_loss = save_by_avg_loss
        self.stop_on_eof = stop_on_eof
        self.data_loader = data_loader
        if stop_on_eof and not data_loader:
            raise ValueError('data_loader is required for stop_on_eof')
        self.call_backs = call_backs or {}
        self.train_step = 0

        # for save_by_avg_loss
        self.save_step_interval = wrapper.train_args['save_every']
        self.max_save_num = wrapper.train_args['max_save_num']
        self.epoch_loss_list = []  # latest 300 loss
        self.save_list = []  # [(file_name, avg_loss, step), ...]

        # for stop_on_eof
        self.last_epoch = 1

        # for best_acc_save
        self.best_acc = 0

        # end epoch
        self.end_epoch = end_epoch

    def save_on_best_test_acc(self, logs):
        """Save the wrapped model whenever validation accuracy makes a new high."""
        acc = logs.get('acc')
        # guard against a missing 'acc' metric: logs.get returns None and the
        # original `None > 0` comparison would raise TypeError on Python 3
        if acc is not None and acc > self.best_acc:
            self.best_acc = acc
            logger.warning(f'saving best model:{self.best_acc}')
            self.wrapper.save()

    def do_save_by_avg_loss(self, epoch, logs):
        """Every save_step_interval epochs, keep the checkpoints with the lowest
        average recent loss, evicting the worst once max_save_num is reached."""
        self.epoch_loss_list.append(logs['loss'])
        if epoch % 100 == 0 and len(self.epoch_loss_list) > 300:
            # bound memory: keep only the most recent 300 losses
            self.epoch_loss_list = self.epoch_loss_list[-300:]
        if epoch % self.save_step_interval == 0 and epoch >= 1:
            latest_loss = self.epoch_loss_list[-30:]
            cur_avg_loss = sum(latest_loss) / len(latest_loss)
            if len(self.save_list) >= self.max_save_num:
                # sort descending by avg loss so index 0 is the worst checkpoint
                self.save_list = list(sorted(self.save_list, key=lambda x: x[1], reverse=True))
                if cur_avg_loss < self.save_list[0][1]:
                    file_name, max_loss, step = self.save_list.pop(0)
                    os.remove(file_name)
                else:
                    # current loss is no better than the worst kept one: skip saving
                    return
            model_path = os.path.join(self.wrapper.out_model_dir, f'model.{epoch}.h5')
            self.wrapper.save(model_path, save_format='h5')
            self.save_list.append([model_path, cur_avg_loss, epoch])

    def do_stop_on_eof(self):
        """Stop training once the loader has consumed num_epochs full passes.

        Returns True when training was stopped.
        """
        real_epoch = self.data_loader.get_min_epoch()
        if real_epoch > self.last_epoch:
            max_epoch_num = self.wrapper.train_args['num_epochs']
            if real_epoch > max_epoch_num:
                logger.warning(f'epoch num reach the limit of num_epochs:{max_epoch_num}, stopping...')
                self.wrapper.model.stop_training = True
                return True
            self.last_epoch = real_epoch
            logger.warning(f'switched into {real_epoch}th epoches')

    def on_epoch_end(self, epoch, logs=None):
        """Track the step, run saving/stopping policies and user hooks."""
        self.train_step = epoch
        if self.save_by_avg_loss and logs:
            self.do_save_by_avg_loss(epoch, logs)
        if self.stop_on_eof:
            self.do_stop_on_eof()
        if 'epoch_end' in self.call_backs:
            self.call_backs['epoch_end'](epoch, logs)
        if epoch >= self.end_epoch > 0:
            logger.warning(f'reach end epoch {self.end_epoch}, stopping...')
            self.wrapper.model.stop_training = True

    def on_test_end(self, logs=None):
        """Apply best-accuracy saving and the user's test_end hook."""
        if self.save_on_best_acc and logs:
            self.save_on_best_test_acc(logs)
        logs = logs or {}
        if 'test_end' in self.call_backs:
            self.call_backs['test_end'](logs)

