
from bert4keras.models import build_transformer_model
from bert4keras.tokenizers import Tokenizer,load_vocab

from tensorflow.keras.layers import Input,Embedding,Reshape
from tensorflow.keras import Model
from tensorflow.keras.callbacks import Callback
from tensorflow.keras.models import save_model

from utils.dataloader import SentimentLoader
from utils.datagenerator import BertDataGenerator
from utils.loss import LanguageModelLossLayer
from utils.generator import BertConditionGenerator

# Training hyperparameters.
max_len: int = 128        # maximum sequence length fed to BERT
batch_size: int = 32
num_classes: int = 2      # binary sentiment condition (0 = negative, 1 = positive)
epochs: int = 2

from utils.bert_info import BertInfo
bert_info_obj = BertInfo()  # holds dict/config/checkpoint paths for the pretrained BERT

# Load the BERT vocabulary. simplified=True prunes rarely-used tokens
# (returning keep_tokens so the checkpoint embeddings can be subset to match),
# while startswith forces the listed special tokens to stay at the front.
token_dict,keep_tokens = load_vocab(
    dict_path=bert_info_obj.dict_path,
    simplified=True,
    startswith=['[PAD]', '[UNK]', '[CLS]', '[SEP]'],
)




# Condition variable: a scalar class label embedded into a 128-dim vector.
condition_input = Input(shape=(1,))
condition_input_embedding = Embedding(2,128)(condition_input)
# (batch, 1, 128) -> (batch, 128) so it can be used as the layer-norm condition.
condition_input_embedding = Reshape((128,))(condition_input_embedding)

# Build BERT in language-model mode ('lm' applies a causal attention mask),
# conditioned on the sentiment label via conditional layer normalization.
bert_model = build_transformer_model(
    config_path=bert_info_obj.config_path,
    checkpoint_path=bert_info_obj.checkpoint_path,
    application='lm',
    keep_tokens=keep_tokens,
    layer_norm_cond = condition_input_embedding,
    additional_input_layers=condition_input
)

# The LM loss is computed inside this layer from (token_ids, predictions).
output = LanguageModelLossLayer(1)([bert_model.inputs[0],bert_model.outputs[0]])

model = Model(bert_model.inputs,output)
# No loss passed to compile: the loss is added internally by LanguageModelLossLayer.
model.compile(optimizer='adam')
model.summary()



# Tokenizer over the (simplified) vocabulary loaded above.
tokenizer = Tokenizer(token_dict,do_lower_case=True)

# Merge train/test/valid splits into one corpus — the model is trained as a
# generator over all available labeled sentences, not evaluated on a held-out set.
sentiment_data = SentimentLoader()
total_data = []
total_data.extend(sentiment_data.get_train_data())
total_data.extend(sentiment_data.get_test_data())
total_data.extend(sentiment_data.get_valid_data())


# Autoregressive sampler that generates text conditioned on a class label,
# using the trained model between [CLS]/[SEP] boundary token ids.
sentiment_generator = BertConditionGenerator(
    start_id=tokenizer._token_start_id,
    end_id = tokenizer._token_end_id,
    maxlen=max_len,
    tokenizer=tokenizer,
    generator_model=model
)

def just_show():
    """Print sample generations for each sentiment condition (1 then 0)."""
    for label, header in ((1, "正面采样:"), (0, "负面采样")):
        print(header)
        print(sentiment_generator.generate(label, 5, 5), "\n")

class Evaluator(Callback):
    """Checkpoint the model whenever the training loss improves, and print
    sample generations (via just_show) after every epoch."""

    def __init__(self):
        super(Evaluator, self).__init__()
        # Lowest training loss observed so far; starts effectively at +inf.
        self.lowest = 1e10

    def on_epoch_end(self, epoch, logs=None):
        # Keras may invoke callbacks with logs=None (or without 'loss'):
        # guard instead of indexing directly.
        logs = logs or {}
        loss = logs.get('loss')
        if loss is not None and loss <= self.lowest:
            self.lowest = loss
            save_model(self.model, './model_saved/best_model')
        just_show()



if __name__ == '__main__':
    evaluator = Evaluator()
    # Yields endless (inputs, targets) batches over the merged corpus.
    train_generator = BertDataGenerator(total_data, tokenizer, maxlen=max_len, batch_size=batch_size)

    # Model.fit accepts Python generators directly; fit_generator is
    # deprecated and removed in recent TF2 releases.
    # NOTE(review): steps_per_epoch=10 looks like a debug setting — consider
    # len(train_generator) for a full epoch.
    model.fit(
        train_generator.forfit(),
        steps_per_epoch=10,
        epochs=epochs,
        callbacks=[evaluator]
    )