#region packages
import itertools
import torch
import numpy
import os
from transformers import AutoConfig
from transformers import logging
import logging as wlog
import random
from train_bert import Instructor
logging.set_verbosity_error()
#endregion

# Configure run-wide file logging: append all INFO+ records to log/log.txt.
# Create the log directory first -- basicConfig opens the file eagerly
# (delay is False by default) and raises FileNotFoundError if 'log/' is missing.
os.makedirs('log', exist_ok=True)
wlog.basicConfig(
    level=wlog.INFO,
    filename='log/log.txt',  # plain literal: the original f-string had no placeholders
    filemode='a',
    format='%(asctime)s - %(levelname)s: %(message)s'
)

# Run configuration, held as class attributes on a plain namespace class.
# Pretrained LM choices: 'bert-base-uncased' or 'vinai/bertweet-base'.
class opt:
    plm = 'vinai/bertweet-base'  # pretrained language-model checkpoint id
    hidden_dim = 768             # transformer hidden size
    maxlen = 128                 # maximum token sequence length
    device = 'cuda'
    seed = None                  # assigned a fresh random value per run below
    log_step = 10
    repeats = 5                  # number of runs per dataset pair
    db = 'jb_bs'                 # overwritten per run in the loop below
    dropout = 0.2
    endurance = 3                # presumably early-stop patience used by Instructor -- confirm
    batch_size = 32
    num_epoch = 10
    wd = 1e-5                    # weight decay
    lr = 2e-5                    # learning rate
    train_ratio = 0.85
    # Resolved once at class-definition time from the chosen checkpoint.
    bertConfig = AutoConfig.from_pretrained(plm)

# Optional dropout overrides for the transformer config (left disabled):
# opt.bertConfig.attention_probs_dropout_prob = 0.2
# opt.bertConfig.hidden_dropout_prob = 0.1

# Dataset pairs to train on; further pairs left disabled:
# 'jb_dt', 'dt_jb', 'dt_bs', 'bs_dt'
dbs = ['jb_bs', 'bs_jb']


def _set_seed(seed):
    """Seed every RNG source used in training for a reproducible run."""
    random.seed(seed)
    numpy.random.seed(seed)
    torch.manual_seed(seed)
    # manual_seed_all covers every visible GPU, not just the current device.
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False
    # NOTE(review): setting PYTHONHASHSEED after interpreter start does not
    # change str hashing in this process; it only affects spawned children.
    os.environ['PYTHONHASHSEED'] = str(seed)


# Run opt.repeats independent trainings per dataset pair, each with a fresh
# random seed that is printed so any run can be reproduced later.
for db in dbs:
    for _ in range(opt.repeats):
        opt.db = db
        opt.seed = random.randint(0, 99999)
        print('seed:', opt.seed)
        # randint always returns an int, so no None check is needed here.
        _set_seed(opt.seed)
        ins = Instructor(opt)
        ins.train()