system HF staff committed on
Commit 0246404
1 Parent(s): d3588ea

Update config.json

Files changed (1):
  config.json +5 -9
config.json CHANGED
@@ -4,26 +4,23 @@
   ],
   "attn_type": "bi",
   "bi_data": false,
+ "bos_token_id": 1,
   "clamp_len": -1,
   "d_head": 64,
   "d_inner": 3072,
   "d_model": 768,
   "dropout": 0.1,
   "end_n_top": 5,
+ "eos_token_id": 2,
   "ff_activation": "relu",
- "finetuning_task": null,
   "initializer_range": 0.02,
- "is_decoder": false,
   "layer_norm_eps": 1e-12,
   "mem_len": null,
+ "model_type": "xlnet",
   "n_head": 12,
   "n_layer": 12,
- "n_token": 32000,
- "num_labels": 2,
- "output_attentions": false,
- "output_hidden_states": false,
   "output_past": true,
- "pruned_heads": {},
+ "pad_token_id": 5,
   "reuse_len": null,
   "same_length": false,
   "start_n_top": 5,
@@ -31,7 +28,6 @@
   "summary_last_dropout": 0.1,
   "summary_type": "last",
   "summary_use_proj": true,
- "torchscript": false,
   "untie_r": true,
- "use_bfloat16": false
+ "vocab_size": 32000
   }
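
After this change the file carries the model identity and token bookkeeping directly: "model_type", the special-token ids, and "vocab_size" (taking the place of the removed "n_token" key), while runtime flags such as "torchscript" and "use_bfloat16" are dropped. A minimal sketch of loading the result with the transformers library; it assumes transformers is installed and that config.json is a local copy of the updated file, and the printed values are illustrative, not part of the commit:

    from transformers import XLNetConfig

    # Load the updated configuration from a local copy of the file changed above.
    config = XLNetConfig.from_json_file("config.json")

    # Keys added in this commit.
    print(config.model_type)                                              # "xlnet"
    print(config.bos_token_id, config.eos_token_id, config.pad_token_id)  # 1 2 5
    print(config.vocab_size)                                              # 32000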