#!/usr/bin/python3
# -*- coding: utf-8 -*-
# File  : distiller.py
# Author: anyongjin
# Date  : 2020/9/1
from DistillBert.model import *
from DistillBert.students import *
from DistillBert.data import build_cols_from_iter
from tensorflow.keras import backend as K
from DistillBert.utils import src_dir


def cos_distance(y_true, y_pred):
    """Cosine-distance loss: batch mean of (1 - cosine similarity).

    Both tensors are L2-normalized along the last axis, so their
    elementwise product summed over that axis is the cosine similarity.
    """
    unit_true = K.l2_normalize(y_true, axis=-1)
    unit_pred = K.l2_normalize(y_pred, axis=-1)
    similarity = K.sum(unit_true * unit_pred, axis=-1)
    return K.mean(1 - similarity)


def distil_loss(base_loss, temperature=1):
    """Wrap ``base_loss`` as a temperature-scaled soft-target distillation loss.

    Logits on both sides are divided by ``temperature`` and softmaxed
    before ``base_loss`` is applied; the result is rescaled by
    ``temperature ** 2`` so gradient magnitudes stay comparable across
    temperatures.
    """

    def soft_target(y_true, y_pred):
        soft_true = K.softmax(y_true / temperature)
        soft_pred = K.softmax(y_pred / temperature)
        return base_loss(soft_true, soft_pred) * temperature ** 2

    return soft_target


class BaseDistiller:
    """Shared plumbing for knowledge distillation: loss construction,
    data-loader setup, student compilation and checkpoint evaluation.

    Concrete distillers (general-corpus vs. task-specific) subclass this
    and supply the teacher/student models and data pipeline.
    """

    def __init__(self, config_path):
        self.config = get_config(config_path)
        self.distill_cfg = self.config['distill']
        self.temperature = self.distill_cfg['temperature']
        assert self.temperature > 0.0, 'temperature must > 0'
        # Component losses combined by get_loss_fn below.
        self.kl_loss = tf.keras.losses.KLDivergence()
        self.ce_loss = tf.keras.losses.CategoricalCrossentropy(from_logits=True)
        self.mse_loss = tf.keras.losses.MSE
        self.cosine_loss = cos_distance
        self.tempt_loss = distil_loss(self.kl_loss, self.temperature)

    def get_tokenize_func_base(self, wrapper):
        """Return a closure mapping raw text to the wrapper's input ids."""
        def _token(text):
            return wrapper.tokenizer.get_input_ids(text)
        return _token

    def build_data_loaders(self, wrapper: BaseDistilModel, col_processers, dev_dir, train_dir=None):
        """Create ``(train_loader, dev_loader)``.

        ``train_loader`` is None when ``train_dir`` is absent or not a
        directory. The train loader cycles effectively forever so that a
        step-driven ``fit()`` never exhausts it.
        """
        from DistillBert.data import DataLoader
        train_loader, dev_loader = None, None
        if train_dir and os.path.isdir(train_dir):
            train_loader = DataLoader(train_dir, col_processers=col_processers,
                                      batch_size=wrapper.train_args['batch_size'],
                                      shuffle_fac=wrapper.train_args['shuffle_fac'],
                                      epoch_num=sys.maxsize)  # epoch set to max to ensure enough data
        dev_loader = DataLoader(dev_dir, col_processers=col_processers,
                                batch_size=wrapper.train_args['batch_size'])
        return train_loader, dev_loader

    def get_loss_fn(self, tempt_weight=1., mse_weight=0.,
                    cosine_weight=0., cate_cross_weight=0.):
        """Build a weighted sum of the enabled component losses.

        BUGFIX: previously each enabled loss was added *unweighted* — the
        weights acted only as on/off switches. Each term is now scaled by
        its weight, as the parameter names and the callers (e.g. 0.9 MSE
        + 0.1 cosine in compile_student) clearly intend.
        """
        assert tempt_weight + mse_weight + cosine_weight + cate_cross_weight > 0, 'loss weight must > 0'

        def loss_fn(y_true, y_pred):
            final_loss = 0
            if tempt_weight > 0:
                final_loss += tempt_weight * self.tempt_loss(y_true, y_pred)
            if mse_weight > 0:
                final_loss += mse_weight * self.mse_loss(y_true, y_pred)
            if cosine_weight > 0:
                final_loss += cosine_weight * self.cosine_loss(y_true, y_pred)
            if cate_cross_weight > 0:
                final_loss += cate_cross_weight * self.ce_loss(y_true, y_pred)
            return final_loss
        return loss_fn

    def compile_student(self, student: StudentModel, for_dense=False):
        """Compile the student with losses/metrics matching its stage
        (general pretrain distillation vs. task fine-tuning).

        Note: the redundant function-local ``import tensorflow as tf`` was
        removed; ``tf`` is already in module scope (used in ``__init__``).
        """
        optimizer = tf.keras.optimizers.Adam(learning_rate=student.train_args['learning_rate'])

        if student.in_pretrain:
            # Independent experiments showed pure temperature distillation
            # works best for general-corpus distillation.
            losses = [self.get_loss_fn(tempt_weight=1, mse_weight=0, cosine_weight=0)]
            metrics = [
                distil_loss(self.kl_loss, 1),
                tf.keras.metrics.MSE,
                self.cosine_loss
            ]
        elif student.task_type == 'sentiment-analysis':
            if for_dense:
                losses = [self.get_loss_fn(tempt_weight=0, cosine_weight=0.1, mse_weight=0.9, cate_cross_weight=0)]
                metrics = losses
            else:
                losses = [self.get_loss_fn(tempt_weight=0, cosine_weight=0.1, mse_weight=0.9, cate_cross_weight=0)]
                metrics = [tf.keras.metrics.CategoricalAccuracy('acc')]
        else:
            raise Exception(f'no losses or metric specified for :{student.task_type}')
        student.model.compile(optimizer=optimizer, loss=losses, metrics=metrics)

    def test_model_steps(self, student: StudentModel, data_iter, batch_num: int = 10):
        """Evaluate the currently loaded (best) weights plus every step
        checkpoint named ``model.<step>.h5`` on the same cached batches.

        :param student: StudentModel whose checkpoints are evaluated
        :param data_iter: iterator yielding (x, y) batches
        :param batch_num: number of batches to cache and evaluate on
        :return: None (results are printed as a dict of losses)
        """
        all_loss = self.get_loss_fn(tempt_weight=0.4, mse_weight=0.3, cosine_weight=0.3)
        cosine_loss = self.cosine_loss
        # Cache the batches once so every checkpoint sees identical data.
        batch_datas = [next(data_iter) for _ in range(batch_num)]
        loss_dic = {}

        def calc_loss():
            loss1, loss2 = [], []
            for x, y in batch_datas:
                y_out = student.model.predict(x)
                loss1.append(all_loss(y, y_out).numpy().mean())
                loss2.append(cosine_loss(y, y_out).numpy().mean())
            return sum(loss1) / len(loss1), sum(loss2) / len(loss2)
        # Current (best) weights are already loaded.
        best_all, best_cosine = calc_loss()
        loss_dic['best_all'] = best_all
        loss_dic['best_cosine'] = best_cosine
        # Then every step checkpoint: file name pattern model.<step>.h5
        for n in os.listdir(student.out_model_dir):
            arr = n.split('.')
            if len(arr) != 3 or arr[0] != 'model' or arr[-1] != 'h5':
                continue
            step = arr[1]
            student.load_model(None, student.out_model_dir, step=int(step))
            l_all, l_cosine = calc_loss()
            loss_dic[f'{step}_all'] = l_all
            loss_dic[f'{step}_cosine'] = l_cosine
        print(loss_dic)


class GeneralDistiller(BaseDistiller):
    """Distill a light general-purpose encoder from a large pretrained
    teacher (BERT/ALBERT/...) on an unlabeled corpus."""

    def __init__(self, teacher_cls=TeacherModel, student_cls=StudentModel):
        super().__init__(os.path.join(src_dir, 'gen_config.yml'))
        self.bert = teacher_cls(in_pretrain=True)
        self.light_bert = student_cls(in_pretrain=True)

    def get_tokenize_func(self):
        """Tokenize with the teacher's tokenizer."""
        return self.get_tokenize_func_base(self.bert)

    def get_corpus_read_fn(self):
        """Return a reader that unpickles token-id rows and right-pads each
        row with zeros to the teacher's max position length."""
        sql_len = self.bert.model_config.max_position_embeddings

        def read_corpus_token(fdata):
            # NOTE(security): pickle must only ever read the locally
            # generated corpus file; never point this at untrusted data.
            import pickle
            raw_rows = pickle.load(fdata)
            # NOTE(review): each padded row is wrapped in an extra list —
            # presumably to match DataLoader's column layout; confirm.
            # Rows longer than sql_len are NOT truncated here.
            rows = [
                [r + [0] * (sql_len - len(r))] for r in raw_rows
            ]
            return rows
        return read_corpus_token

    def get_corpus_data_iter(self, student: StudentModel):
        """Build ``(loader, iterator)`` over the general corpus, with the
        teacher's outputs attached as distillation targets."""
        # BUGFIX: DataLoader was referenced here without being in scope —
        # the module only imports build_cols_from_iter from DistillBert.data,
        # and the other DataLoader import is local to build_data_loaders.
        from DistillBert.data import DataLoader
        gen_data_path = self.distill_cfg['general_corpus_path']
        gen_batch_size = self.distill_cfg['gen_batch_size']

        data_loader = DataLoader(gen_data_path, read_fn=self.get_corpus_read_fn(), batch_size=gen_batch_size)
        data_iter = student.build_teach_data(data_loader.build(), self.bert.model)
        return data_loader, data_iter

    def distill_general(self):
        """Distill a light base model from bert, albert, ...

        Best-effort: on any failure the partially-trained student is still
        saved so progress is not lost.
        :return: None
        """
        try:
            self.distill_light(self.light_bert)
            self.light_bert.save()
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error(e)
            self.light_bert.save()

    def distill_light(self, student: StudentModel):
        """Run the step-driven distillation fit loop for one student."""
        self.compile_student(student)
        data_loader, data_iter = self.get_corpus_data_iter(student)

        # steps_per_epoch=1 turns `epochs` into a raw step counter, so
        # epoch-level callbacks fire after every batch.
        student.model.fit(x=data_iter, steps_per_epoch=1,
                          epochs=student.train_args['max_steps'],
                          callbacks=student.get_callbacks(data_loader, save_by_loss=True))

    def test_light_bert_steps(self):
        """Evaluate every saved checkpoint of the light model on corpus batches."""
        _, data_iter = self.get_corpus_data_iter(self.light_bert)
        self.test_model_steps(self.light_bert, data_iter)


class TaskDistiller(BaseDistiller):
    """Distill a task-specific light model, either from a fine-tuned
    teacher (``distill_from_teacher=True``) or directly from one-hot
    labels (``distill_from_teacher=False`` — no teacher is constructed)."""

    def __init__(self, teacher_cls=TeacherModel,
                 distill_from_teacher=True, do_quantize=False):
        super().__init__(os.path.join(src_dir, 'task_config.yml'))
        self.distill_from_teacher = distill_from_teacher
        # self.teacher only exists when distilling from a teacher; every
        # teacher access below must therefore be guarded.
        if distill_from_teacher:
            self.teacher = teacher_cls()
        # Student architecture is selected by config, not hard-coded.
        student_cls = stu_model_map[self.config['distill']['model_arc']]
        self.student = student_cls(distill_from_teacher=distill_from_teacher, do_quantize=do_quantize)
        self.label_vocab = self.student.label_vocab
        self.train_dir = os.path.join(self.config['data_dir'], self.config['task']['train_dir'])
        self.dev_dir = os.path.join(self.config['data_dir'], self.config['task']['dev_dir'])

    def get_tokenize_func(self):
        """Tokenize with the student's own tokenizer."""
        return self.get_tokenize_func_base(self.student)

    def train_teacher(self, col_processers):
        """Fine-tune the teacher on task data; best-effort save on failure."""
        try:
            train_loader, dev_loader = self.build_data_loaders(self.teacher, col_processers,
                                                               self.dev_dir, self.train_dir)
            self.teacher.fit(train_loader, dev_loader)
            self.teacher.save()
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error(f'train teacher model error:{e}')
            self.teacher.save()

    def train_student(self, student: StudentModel, col_processers):
        """Two-phase student training.

        Phase 1 (optional, when ``logits_max_steps`` > 0): train the dense
        layer against teacher outputs (or one-hot labels without a teacher).
        Phase 2: fine-tune the classifier the same way.

        BUGFIX: the teacher was previously used *unconditionally* in
        phase 2, raising AttributeError when ``distill_from_teacher=False``;
        the teacher-dependent steps are now guarded, mirroring phase 1.
        """
        train_loader, dev_loader = self.build_data_loaders(student, col_processers, self.dev_dir, self.train_dir)
        train_iter = train_loader.build()
        dev_iter = dev_loader.build()
        self.student.train_dense = student.train_args['logits_max_steps'] > 0
        self.student.train_mode = True
        self.student.load(self.student.out_model_dir)
        if self.student.train_dense:
            # Phase 1: train the dense layer first.
            self.compile_student(student, for_dense=True)
            if self.distill_from_teacher:
                train_iter = student.build_teach_data(train_iter, self.teacher.model)
                dev_iter = student.build_teach_data(dev_iter, self.teacher.model)
            else:
                train_iter = student.wrap_one_hot_out(train_iter)
                dev_iter = student.wrap_one_hot_out(dev_iter)
            logger.warning('building teacher pool_data for eval...')
            dev_data = build_cols_from_iter(dev_iter)

            student.model.fit(x=train_iter, validation_data=dev_data, steps_per_epoch=1,
                              epochs=student.train_args['logits_max_steps'],
                              validation_freq=student.train_args['eval_every_n_steps'],
                              callbacks=student.get_callbacks(train_loader, save_by_loss=True))
            # (typo "fine-tinue" fixed; placeholder-less f-prefix dropped)
            logger.warning('train dense complete, start fine-tune classifier layer')
            student.train_dense = False
            student.load_model(None, student.out_model_dir)
        # Phase 2: classifier fine-tuning.
        if self.distill_from_teacher:
            # Warm-start the student's classifier from the teacher's weights.
            self.student.model.layers[-1].set_weights(self.teacher.model.classifier.get_weights())
        self.compile_student(student, for_dense=False)
        dev_iter = dev_loader.build(rebuild=True)
        train_iter = train_loader.build(rebuild=True)
        if self.distill_from_teacher:
            dev_iter = student.build_teach_data(dev_iter, self.teacher.model)
            train_iter = student.build_teach_data(train_iter, self.teacher.model)
        else:
            dev_iter = student.wrap_one_hot_out(dev_iter)
            train_iter = student.wrap_one_hot_out(train_iter)
        logger.warning('building teacher out_data for eval...')
        dev_data = build_cols_from_iter(dev_iter)

        student.model.fit(x=train_iter, validation_data=dev_data, steps_per_epoch=1,
                          epochs=student.train_args['max_steps'],
                          validation_freq=student.train_args['eval_every_n_steps'],
                          callbacks=student.get_callbacks(train_loader, save_on_best_acc=True),
                          initial_epoch=student.train_args['initial_epoch'])

    def distill_task(self, col_processers, train_teacher=True):
        """
        1. train teacher on task data (optional)
        2. distill teacher knowledge to light task model based on a general light model
        NOTE: requires distill_from_teacher=True (self.teacher is used below).
        :return:
        """
        try:
            # train teacher
            self.teacher.load(self.teacher.out_model_dir)
            if train_teacher:
                self.train_teacher(col_processers)
            # distill teacher
            self.train_student(self.student, col_processers)
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error(e)
        self.test(col_processers)

    def test(self, col_processers):
        """Report dev-set metrics for teacher and student side by side.
        NOTE(review): assumes self.teacher exists (distill_from_teacher=True)."""
        _, dev_loader = self.build_data_loaders(self.teacher, col_processers, self.dev_dir)
        res = self.teacher.test(dev_loader)
        logger.warning(f'test on teacher:{res}')
        res = self.student.test(dev_loader)
        logger.warning(f'test on student:{res}')

    def test_student_steps(self, col_processers):
        """Evaluate every student checkpoint against teacher targets on dev
        data. NOTE(review): assumes self.teacher exists."""
        _, dev_loader = self.build_data_loaders(self.teacher, col_processers, self.dev_dir)
        data_iter = dev_loader.build()
        data_iter = self.student.build_teach_data(data_iter, self.teacher.model)
        self.test_model_steps(self.student, data_iter)

    def save_student_light(self, model_dir=None):
        """Export the (possibly quantized) student model for serving."""
        self.student.save(model_dir_or_path=model_dir)

