import argparse
from Transformer.modules.embedding import TransformerCharEmbed

# Command-line interface: only the dataset root directory is configurable.
parser = argparse.ArgumentParser()
parser.add_argument(
    '--dataset',
    type=str,
    default='data//',
)

# Parse sys.argv at import time; the resulting path drives the cache name below.
args = parser.parse_args()

dataset = args.dataset

# ---------------------------------------------------------------------------
# Model hyperparameters
# ---------------------------------------------------------------------------
n_heads = 14             # number of attention heads
head_dims = 128          # width of each attention head
num_layers = 2           # number of encoder layers
lr = 0.0009              # learning rate
attn_type = 'adatrans'   # attention variant identifier (project-specific)
char_type = 'cnn'        # character-level encoder type

pos_embed = None         # no explicit positional embedding

# ######### hyper #########
batch_size = 16
# NOTE(review): fractional value despite the name "steps" — presumably a
# warmup *ratio* of total steps; confirm against the LR scheduler.
warmup_steps = 0.01
# NOTE(review): truthy flag — presumably LayerNorm applied after the sublayer
# (post-norm); confirm against the encoder implementation.
after_norm = 1
model_type = 'transformer'
normalize_embed = True

dropout = 0.15           # encoder dropout
fc_dropout = 0.4         # dropout before the final classifier

encoding_type = 'bioes'  # NER tag scheme
# Cache file name derived from the configuration.
# NOTE(review): `dataset` defaults to 'data//', so slashes end up embedded in
# this path (caches/data//_...); confirm the intended cache layout.
name = f'caches/{dataset}_{model_type}_{encoding_type}_{char_type}_{normalize_embed}.pkl'
d_model = n_heads * head_dims    # total model width: 14 * 128 = 1792
dim_feedforward = 2 * d_model    # FFN inner width (product of ints; no cast needed)

