# -*- coding: utf-8 -*-  
'''
训练bert的预训练

@author: luoyi
Created on 2021年4月16日
'''
import sys
import os
# Locate the project root: everything in this file's absolute path up to
# (and including) the first "couplet" directory, then make the project
# importable by appending that root to sys.path.
_here = os.path.abspath(os.path.dirname(__file__))
ROOT_PATH = _here.split('couplet')[0] + "couplet"
sys.path.append(ROOT_PATH)

import data.dataset_bert as ds_bert
import utils.conf as conf
import utils.dictionaries_bert as dict_bert
import utils.logger_factory as logf
from models.bert.nets import BertModel
from math import ceil


log = logf.get_logger('bert_train')


#    Initialize the word/character dictionary (must happen before the
#    model is built, since vocab_size is read from it below).
dict_bert.load_dict_from_file()


#    Prepare the BERT pre-training / validation datasets.
count = conf.DATASET.get_count_train()
batch_size = conf.BERT.get_batch_size()
epochs = conf.BERT.get_epochs()
#    Steps per epoch: a trailing partial batch still counts as one step.
steps_per_epoch = ceil(count / batch_size)
log.info('init dataset...')
db_train = ds_bert.tensor_db_from_tfrecord(tfrecord_dir=conf.BERT.get_pre_training_tfrecord_train(), 
                                           batch_size=batch_size, 
                                           epochs=epochs, 
                                           shuffle_buffer_rate=conf.BERT.get_shuffle_buffer_rate(), 
                                           tfrecord_buffer_rate=conf.BERT.get_tfrecord_buffer_rate())
db_val = ds_bert.tensor_db_from_tfrecord(tfrecord_dir=conf.BERT.get_pre_training_tfrecord_val(), 
                                         batch_size=batch_size, 
                                         epochs=epochs, 
                                         shuffle_buffer_rate=conf.BERT.get_shuffle_buffer_rate(), 
                                         tfrecord_buffer_rate=conf.BERT.get_tfrecord_buffer_rate())
log.info('train_tfrecord_dir:{} count:{} batch_size:{} epochs:{}'.format(conf.BERT.get_pre_training_tfrecord_train(), count, batch_size, epochs))
log.info('val_tfrecord_dir:{} batch_size:{} epochs:{}'.format(conf.BERT.get_pre_training_tfrecord_val(), batch_size, epochs))
log.info('init dataset finished.')

#    Build the BERT model (assembled and built eagerly so it is ready
#    to train immediately).
#    Hoist config values that are used more than once so a single
#    consistent value is logged and passed everywhere.
learning_rate = conf.BERT.get_learning_rate()
max_sen_len = conf.BERT.get_pre_training_sentence_maxlen()
bert_model = BertModel(name='bert', 
                       learning_rate=learning_rate, 
                       input_shape=(None, 2, max_sen_len), 
                       auto_assembling=True, 
                       is_build=True, 
                       vocab_size=dict_bert.dict_size(), 
                       max_sen_len=max_sen_len, 
                       max_sen=conf.BERT.get_pre_training_max_sentences(), 
                       n_block=conf.BERT.get_n_block(), 
                       n_head=conf.BERT.get_n_head_attention(), 
                       d_model=conf.BERT.get_d_model(), 
                       f_model=conf.BERT.get_f_model(), 
                       dropout_rate=conf.BERT.get_dropout_rate(), 
                       lamud_loss_pre_nsp=conf.BERT.get_lamud_loss_pre_nsp(), 
                       lamud_loss_pre_mlm=conf.BERT.get_lamud_loss_pre_mlm())
bert_model.show_info()
log.info('load model finished. ')
log.info('learning_rate:{}'.format(learning_rate))


#    Run pre-training; weights and tensorboard logs go to the configured
#    directories.  NOTE(review): 'auto_save_weights_after_traind' is the
#    keyword name defined by train_tensor_db (typo lives in that API).
bert_model.train_tensor_db(db_train, db_val, 
                           steps_per_epoch, 
                           batch_size=batch_size, 
                           epochs=epochs, 
                           auto_save_weights_after_traind=True, auto_save_weights_dir=conf.BERT.get_model_save_weights(), 
                           auto_learning_rate_schedule=True, 
                           auto_tensorboard=True, auto_tensorboard_dir=conf.BERT.get_tensorboard_dir())




