# -*- coding: utf-8 -*-
"""
@date: 2020/12/18 14:05
@file: toych_bert_row.py
@author: lilong
@desc: torch_bert流程跑通
"""

import argparse

from torch.utils.data import DataLoader

from bert_torch.model import BERT
from bert_torch.trainer import BERTTrainer
from bert_torch.dataset import BERTDataset, WordVocab

from bert_torch.dataset.vocab import TorchVocab

vocab_path = "../pretrainedModel/chinese_L-12_H-768_A-12/tt_vocab.txt"

# Load the word vocabulary from the pretrained-model directory.
# WordVocab.load_vocab presumably deserializes a previously saved vocab
# file -- TODO confirm the on-disk format against bert_torch.dataset.
print("Loading Vocab:", vocab_path)
vocab = WordVocab.load_vocab(vocab_path)
print("Vocab Size: ", len(vocab))
print("vocab:", vocab)

# Build the pretraining datasets and dataloaders.
corpus_path = "../data/score_test.txt"  # raw corpus file -- TODO confirm expected line format
seq_len = 50        # maximum token sequence length per example
corpus_lines = 100  # number of corpus lines to read (small slice for a smoke run)
on_memory = True    # keep the whole corpus in RAM instead of streaming from disk

print("Loading Train Dataset:", corpus_path)
# Keep the file path and the dataset object under separate names: the original
# code rebound `train_dataset` from path to dataset object, then accidentally
# passed the *dataset object* as the corpus path when building `test_dataset`.
train_dataset = BERTDataset(corpus_path, vocab, seq_len=seq_len,
                            corpus_lines=corpus_lines, on_memory=on_memory)

# BUG FIX: pass the corpus *path*, not the already-built train BERTDataset.
# NOTE(review): train and test currently share the same corpus file; point
# this at a held-out file once one exists.
test_dataset = BERTDataset(corpus_path, vocab, seq_len=seq_len,
                           corpus_lines=corpus_lines, on_memory=on_memory)

batch_size = 64
num_workers = 2
print("Creating Dataloader")
train_data_loader = DataLoader(train_dataset, batch_size=batch_size, num_workers=num_workers)

# `test_dataset` is always constructed above; the guard is kept so the
# downstream `if test_data_loader is not None` checks remain meaningful.
test_data_loader = DataLoader(test_dataset, batch_size=batch_size, num_workers=num_workers) \
                            if test_dataset is not None else None


# ---- Model construction ----
hidden_size = 256         # transformer hidden dimension
n_transformer_layers = 8  # number of stacked transformer blocks
n_attention_heads = 8     # attention heads per block
print("Building BERT model")
bert = BERT(
    len(vocab),
    hidden=hidden_size,
    n_layers=n_transformer_layers,
    attn_heads=n_attention_heads,
)

# ---- Trainer setup ----
lr = 1e-3                 # Adam learning rate
adam_beta1 = 0.9          # Adam first-moment decay
adam_beta2 = 0.999        # Adam second-moment decay
adam_weight_decay = 0.01  # weight decay handed to the optimizer
with_cuda = False         # train on CPU; set True for GPU
cuda_devices = None       # presumably specific GPU ids when with_cuda is True -- verify in BERTTrainer
log_freq = 10             # NOTE(review): looks like a "log every N batches" knob -- confirm in BERTTrainer
print("Creating BERT Trainer")
trainer = BERTTrainer(bert, len(vocab),
                      train_dataloader=train_data_loader,
                      test_dataloader=test_data_loader,
                      lr=lr,
                      betas=(adam_beta1, adam_beta2),
                      weight_decay=adam_weight_decay,
                      with_cuda=with_cuda,
                      cuda_devices=cuda_devices,
                      log_freq=log_freq)

# Run pretraining: one training pass plus a checkpoint per epoch, and an
# evaluation pass whenever a test loader was built.
num_epochs = 10
checkpoint_dir = "./"
print("Training Start")
for ep in range(num_epochs):
    trainer.train(ep)
    trainer.save(ep, checkpoint_dir)

    if test_data_loader is not None:
        trainer.test(ep)
