import tensorflow.keras.backend as K
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam


from bert4keras.tokenizers import Tokenizer
from bert4keras.models import build_transformer_model

from utils.datagenerator import BertClassifyDataGenerator
from utils.bert_info import BertInfo
from utils.dataloader import IflytekDataLoader
from utils.layers import BinaryRandomChoice
from utils.metrics import metrics_classify_accuracy
from utils.callback import AccuracyCallback



# Task hyper-parameters.
num_classes = 119  # number of target categories (iflytek app-classification task)
maxlen = 128       # maximum token length fed to BERT
batch_size = 32


# Holds paths to the pretrained BERT assets (config / checkpoint / vocab).
bertinfo_object = BertInfo()
# Build the tokenizer from the BERT vocabulary (lowercasing enabled)
tokenizer = Tokenizer(bertinfo_object.dict_path,do_lower_case=True)





# Load the pretrained model (12 layers) as the "predecessor".
# `return_keras_model=False` keeps the bert4keras wrapper so we can call
# apply_embeddings / apply_main_layers later; the prefix keeps its layer
# names distinct from the successor's.
predecessor = build_transformer_model(
    config_path=bertinfo_object.config_path,
    checkpoint_path=bertinfo_object.checkpoint_path,
    return_keras_model=False,
    prefix='Predecessor-'
)

# Load the pretrained model again, truncated to 3 hidden layers, as the
# compressed "successor" that will be distilled.
successor = build_transformer_model(
    config_path=bertinfo_object.config_path,
    checkpoint_path=bertinfo_object.checkpoint_path,
    return_keras_model=False,
    num_hidden_layers = 3,
    prefix = 'Successor-'
)


def bert_of_theseus(predecessor, successor, classifier_model):
    """Build the BERT-of-Theseus training graph.

    Each successor transformer layer is paired with a module of
    consecutive predecessor layers; a BinaryRandomChoice layer
    stochastically swaps between the two paths during training, so
    gradients only flow into the successor.

    Args:
        predecessor: large pretrained transformer (bert4keras wrapper),
            frozen here.
        successor: small transformer being distilled.
        classifier_model: shared classification head, frozen here.

    Returns:
        A keras Model from the predecessor's inputs to class probabilities.
    """
    model_inputs = predecessor.inputs
    # Freeze the already-trained predecessor and the shared classifier head.
    for frozen_layer in predecessor.model.layers:
        frozen_layer.trainable = False
    classifier_model.trainable = False

    # Embedding replacement: randomly choose one of the two embedding outputs.
    mixed = BinaryRandomChoice()([
        predecessor.apply_embeddings(model_inputs),
        successor.apply_embeddings(model_inputs),
    ])

    # Transformer replacement: one successor layer stands in for a whole
    # module of `module_size` consecutive predecessor layers.
    module_size = predecessor.num_hidden_layers // successor.num_hidden_layers
    for block_idx in range(successor.num_hidden_layers):
        p_out = mixed
        for offset in range(module_size):
            p_out = predecessor.apply_main_layers(
                p_out, module_size * block_idx + offset
            )
        s_out = successor.apply_main_layers(mixed, block_idx)
        mixed = BinaryRandomChoice()([p_out, s_out])

    # Attach the classification head and wrap everything as one model.
    return Model(model_inputs, classifier_model(mixed))


# Shared classification head: take the first token's vector (position 0,
# the [CLS] slot) and map it to `num_classes` softmax probabilities.
# NOTE(review): the name is misspelled ('classfier'), but it is referenced
# throughout the file, so it is kept as-is for compatibility.
x_in = layers.Input(shape=K.int_shape(predecessor.output)[1:])
x = layers.Lambda(lambda x:x[:,0])(x_in)
x = layers.Dense(units=num_classes,activation='softmax')(x)
classfier = Model(x_in,x)


# Full 12-layer predecessor + shared classifier head; trained first so the
# theseus stage has a strong teacher.
predecessor_model = Model(predecessor.inputs,classfier(predecessor.output))
predecessor_model.compile(
    loss='sparse_categorical_crossentropy',
    optimizer= Adam(2e-5),
    metrics=['sparse_categorical_accuracy']
)
predecessor_model.summary()

# 3-layer successor + shared classifier head; fine-tuned as the final stage.
# NOTE(review): this is compiled before bert_of_theseus() flips the
# classifier's `trainable` flag to False — confirm whether the classifier
# is meant to keep training during the successor stage.
successor_model = Model(successor.inputs,classfier(successor.output))
successor_model.compile(
    loss='sparse_categorical_crossentropy',
    optimizer=Adam(2e-5),
    metrics=['sparse_categorical_accuracy']
)
successor_model.summary()


# Theseus mixture model: bert_of_theseus() freezes the predecessor layers
# and the classifier head, so only the successor layers receive gradients.
theseus_model = bert_of_theseus(predecessor,successor,classfier)
theseus_model.compile(
    loss='sparse_categorical_crossentropy',
    optimizer=Adam(2e-5),
    metrics=['sparse_categorical_accuracy']
)
theseus_model.summary()


if __name__ == '__main__':
    # Load the iflytek classification dataset.
    iflytekData = IflytekDataLoader()
    train_data = iflytekData.get_train_data()
    valid_data = iflytekData.get_valid_data()

    # Wrap raw samples into batched, tokenized generators.
    train_generator = BertClassifyDataGenerator(train_data, tokenizer, maxlen, batch_size)
    valid_generator = BertClassifyDataGenerator(valid_data, tokenizer, maxlen, batch_size)

    # NOTE(review): steps_per_epoch=1 in all three stages looks like a
    # debugging leftover; use len(train_generator) for a real run.

    # Stage 1: train the 12-layer predecessor (plus classifier head).
    # Fixed: fit_generator is deprecated/removed in TF2 — fit() accepts
    # generators directly, matching the other two stages; the callback was
    # constructed but never passed, now wired in like its siblings.
    predecessor_accuracy_callback = AccuracyCallback("./best_predecessor/", metrics_classify_accuracy, valid_generator)
    predecessor_model.fit(
        train_generator.forfit(),
        steps_per_epoch=1,
        epochs=5,
        callbacks=[predecessor_accuracy_callback]
    )

    # Stage 2: train the theseus mixture (only successor layers trainable).
    theseus_accuracy_callback = AccuracyCallback("./best_theseus/", metrics_classify_accuracy, valid_generator)
    theseus_model.fit(
        train_generator.forfit(),
        steps_per_epoch=1,
        epochs=10,
        callbacks=[theseus_accuracy_callback]
    )

    # Stage 3: fine-tune the 3-layer successor on its own.
    successor_accuracy_callback = AccuracyCallback('./best_successor/', metrics_classify_accuracy, valid_generator)
    successor_model.fit(
        train_generator.forfit(),
        steps_per_epoch=1,
        epochs=5,
        callbacks=[successor_accuracy_callback]
    )