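# Configuration for the language-model pretraining stage (BCNLanguage) on WikiText-103.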
global:
  name: pretrain-language-model
  phase: train
  stage: pretrain-language
  workdir: workdir
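  # ~ is YAML null, i.e. no random seed is fixed here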
  seed: ~
 
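# Data splits: training and evaluation CSVs from WikiText-103, with a batch size per split.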
dataset:
  train: {
    roots: ['data/WikiText-103.csv'],
    batch_size: 4096
  }
  test: {
    roots: ['data/WikiText-103_eval_d1.csv'],
    batch_size: 4096
  }

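# Training schedule: total epochs plus (presumably) logging, evaluation and checkpoint intervals in iterations.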
training:
  epochs: 80
  show_iters: 50
  eval_iters: 6000
  save_iters: 3000

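# Adam optimizer: lr 1e-4, no weight decay, gradient clipping at 20, and a staged LR schedule below.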
optimizer:
  type: Adam
  true_wd: False
  wd: 0.0
  bn_wd: False
  clip_grad: 20
  lr: 0.0001
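  # note: !!python/tuple below is a PyYAML-specific tag; yaml.safe_load rejects it,
  # so loading this file presumably requires a loader with Python-tag support (e.g. yaml.UnsafeLoader)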
  args: {
    betas: !!python/tuple [0.9, 0.999], # for default Adam 
  }
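  # periods 70 + 10 match the 80 training epochs; gamma is presumably the per-period LR decay factor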
  scheduler: {
    periods: [70, 10],
    gamma: 0.1,
  }

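# Language model: BCNLanguage with 4 layers, loss weight 1.0, self-attention disabled.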
model:
  name: 'modules.model_language.BCNLanguage'
  language: {
    num_layers: 4,
    loss_weight: 1.,
    use_self_attn: False
  }