# -*- coding: utf-8 -*-  
'''
训练tbert模型

Created on 2021年9月13日
@author: luoyi
'''
import numpy as np  
np.set_printoptions(threshold=np.inf)  

import utils.conf as conf
from utils.iexicon import LiteWordsWarehouse, WordsWarehouse
from data.sohu_thuc_news.tbert_dataset import TBertTFRecordDataset
from models.tbert.nets import TBert


#    Load the word warehouses (full and lite vocabularies) from their pickle files.
print('初始化词库...')
WordsWarehouse.instance().load_pkl()
LiteWordsWarehouse.instance().load_pkl()

#    Build the train / validation data pipelines from the TFRecord dataset.
print('初始化数据源...')
batch_size = 2
train_count = conf.DATASET_SOHU_THUCNEWS.get_tbert_training_train_count()
#    Floor division: the last partial batch (if any) is not counted as a step.
steps_per_epoch = train_count // batch_size
epochs = conf.DATASET_SOHU_THUCNEWS.get_epochs()
dataset = TBertTFRecordDataset()
train_db = dataset.tensor_train_db(batch_size=batch_size, epochs=epochs)
val_db = dataset.tensor_val_db(batch_size=batch_size, epochs=1)

#    Construct the TBert model and print its structure summary.
print('初始化模型...')
model = TBert()
model.show_info()

#    Run training. Weight checkpoints and TensorBoard logs are written
#    automatically to the paths configured in conf.TBERT.
#    NOTE(review): `auto_save_weights_after_traind` is the API's own (misspelled)
#    keyword name — do not "correct" it here without changing the model class.
print('喂数据...')
model.train_tensor_db(train_db, val_db, steps_per_epoch, batch_size,
                      epochs,
                      auto_save_weights_after_traind=True,
                      auto_save_weights_dir=conf.TBERT.get_model_save_weights_path(),
                      auto_learning_rate_schedule=True,
                      auto_tensorboard=True,
                      auto_tensorboard_dir=conf.TBERT.get_tensorboard_dir_path())

