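# Training, optimizer, data, model, and inference settings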
train:
  seed: 1234
  epochs: 300
  batch_size: 8
  gradient_accumulation: 4 # accumulate over 4 steps (effective batch size 8 * 4 = 32)
  save_every_n_epoch: 1
  precision: 16-mixed # 16-bit mixed-precision training
  gradient_clip: 1.0
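# LR schedule (assumed from the key names): warm up from lr_init to the
# peak lr over warmup_steps, then decay toward lr_end over decay_steps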
optimizer:
  lr: 0.01
  lr_init: 0.00001
  lr_end: 0.0001
  warmup_steps: 2000
  decay_steps: 40000
data:
  max_eval_sample: 8
  max_sec: 54
  num_workers: 1
  pad_val: 1024 # same as the EOS token id in the model section
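# Model hyperparameters; the sizes below (16 layers, 16 attention heads,
# 2048-unit feed-forward blocks) suggest a transformer-style network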
model:
  vocab_size: 1025 # 1024 token ids + 1 for EOS (id 1024)
  phoneme_vocab_size: 512
  embedding_dim: 1024
  hidden_dim: 1024
  head: 16
  linear_units: 2048
  n_layer: 16
  dropout: 0
  EOS: 1024
inference:
  top_k: 5
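
# ----------------------------------------------------------------------
# A minimal sketch of reading this file in Python with PyYAML; the file
# name below is hypothetical and the snippet is an illustration, not
# part of the original training code:
#
#   import yaml
#
#   with open("config.yaml") as f:   # hypothetical path
#       cfg = yaml.safe_load(f)
#
#   assert cfg["data"]["pad_val"] == cfg["model"]["EOS"]  # pad id == EOS id
#   print(cfg["train"]["batch_size"])                     # -> 8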