system HF staff committed on
Commit
4952163
1 Parent(s): 81f4785

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +34 -29
config.json CHANGED
@@ -2,32 +2,37 @@
2
  "architectures": [
3
  "XLNetLMHeadModel"
4
  ],
5
- "attn_type": "bi",
6
- "bi_data": false,
7
- "clamp_len": -1,
8
- "d_head": 64,
9
- "d_inner": 4096,
10
- "d_model": 1024,
11
- "dropout": 0.1,
12
- "end_n_top": 5,
13
- "ff_activation": "gelu",
14
- "finetuning_task": null,
15
- "initializer_range": 0.02,
16
- "layer_norm_eps": 1e-12,
17
- "mem_len": null,
18
- "n_head": 16,
19
- "n_layer": 24,
20
- "n_token": 32000,
21
- "num_labels": 2,
22
- "output_attentions": false,
23
- "output_hidden_states": false,
24
- "reuse_len": null,
25
- "same_length": false,
26
- "start_n_top": 5,
27
- "summary_activation": "tanh",
28
- "summary_last_dropout": 0.1,
29
- "summary_type": "last",
30
- "summary_use_proj": true,
31
- "torchscript": false,
32
- "untie_r": true
33
- }
 
 
 
 
 
 
2
  "architectures": [
3
  "XLNetLMHeadModel"
4
  ],
5
+ "attn_type": "bi",
6
+ "bi_data": false,
7
+ "clamp_len": -1,
8
+ "d_head": 64,
9
+ "d_inner": 4096,
10
+ "d_model": 1024,
11
+ "dropout": 0.1,
12
+ "end_n_top": 5,
13
+ "ff_activation": "gelu",
14
+ "finetuning_task": null,
15
+ "initializer_range": 0.02,
16
+ "layer_norm_eps": 1e-12,
17
+ "mem_len": null,
18
+ "n_head": 16,
19
+ "n_layer": 24,
20
+ "n_token": 32000,
21
+ "num_labels": 2,
22
+ "output_attentions": false,
23
+ "output_hidden_states": false,
24
+ "reuse_len": null,
25
+ "same_length": false,
26
+ "start_n_top": 5,
27
+ "summary_activation": "tanh",
28
+ "summary_last_dropout": 0.1,
29
+ "summary_type": "last",
30
+ "summary_use_proj": true,
31
+ "task_specific_params": {
32
+ "text_generation": {
33
+ "max_length": 210
34
+ }
35
+ },
36
+ "torchscript": false,
37
+ "untie_r": true
38
+ }