nherve committed on
Commit
eeb4ab4
1 Parent(s): f96ac31

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +55 -0
config.json ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "amp": 1,
  "attention_dropout": 0.1,
  "bos_index": 0,
  "bptt": 512,
  "clip_grad_norm": 5,
  "dropout": 0.1,
  "emb_dim": 768,
  "encoder_only": true,
  "eos_index": 1,
  "fp16": true,
  "gelu_activation": true,
  "group_by_size": true,
  "id2lang": {
    "0": "fr"
  },
  "lang2id": {
    "fr": 0
  },
  "langs": [
    "fr"
  ],
  "layer_norm_eps": 1e-06,
  "lg_sampling_factor": -1,
  "lgs": "fr",
  "mask_index": 5,
  "max_batch_size": 0,
  "max_vocab": -1,
  "mlm_steps": [
    [
      "fr",
      null
    ]
  ],
  "n_heads": 12,
  "n_langs": 1,
  "n_layers": 12,
  "n_words": 48701,
  "pad_index": 2,
  "pre_norm": false,
  "sample_alpha": 0,
  "share_inout_emb": true,
  "tokens_per_batch": -1,
  "unk_index": 3,
  "use_apex": true,
  "use_lang_emb": true,
  "word_blank": 0,
  "word_dropout": 0,
  "word_keep": 0.1,
  "word_mask": 0.8,
  "word_mask_keep_rand": "0.8,0.1,0.1",
  "word_pred": 0.15,
  "word_rand": 0.1,
  "word_shuffle": 0
}