Commit 295a972 (1 parent: c01b758)
system (HF staff) committed

Update config.json

Files changed (1)
  1. config.json +4 -27
config.json CHANGED
@@ -1,41 +1,23 @@
 {
-  "architectures": null,
   "attn_type": "bi",
   "bi_data": false,
+  "bos_token_id": 1,
   "clamp_len": -1,
   "d_head": 64,
   "d_inner": 4096,
   "d_model": 1024,
-  "do_sample": false,
   "dropout": 0.1,
   "end_n_top": 5,
+  "eos_token_id": 2,
   "ff_activation": "relu",
-  "finetuning_task": null,
-  "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1"
-  },
   "initializer_range": 0.02,
-  "is_decoder": false,
-  "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1
-  },
   "layer_norm_eps": 1e-12,
-  "length_penalty": 1.0,
-  "max_length": 20,
   "mem_len": null,
   "model_type": "xlnet",
   "n_head": 16,
   "n_layer": 24,
-  "num_beams": 1,
-  "num_labels": 2,
-  "num_return_sequences": 1,
-  "output_attentions": false,
-  "output_hidden_states": false,
   "output_past": true,
-  "pruned_heads": {},
-  "repetition_penalty": 1.0,
+  "pad_token_id": 5,
   "reuse_len": null,
   "same_length": false,
   "start_n_top": 5,
@@ -43,11 +25,6 @@
   "summary_last_dropout": 0.1,
   "summary_type": "last",
   "summary_use_proj": true,
-  "temperature": 1.0,
-  "top_k": 50,
-  "top_p": 1.0,
-  "torchscript": false,
   "untie_r": true,
-  "use_bfloat16": false,
   "vocab_size": 32000
-}
+}
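
In effect, the commit strips the generic PretrainedConfig defaults (label maps, generation settings such as do_sample, num_beams and temperature, and flags like torchscript) out of the stored file and adds explicit special-token IDs (bos_token_id, eos_token_id, pad_token_id). A minimal sketch, not part of the commit, of how to check the result with the transformers library, assuming a local copy of the updated file saved as config.json:

    from transformers import XLNetConfig

    # Load the updated config from a local copy of the file.
    # The path "config.json" is an assumption for illustration.
    config = XLNetConfig.from_json_file("config.json")

    # The three fields added by this commit:
    print(config.model_type)     # "xlnet"
    print(config.bos_token_id)   # 1
    print(config.eos_token_id)   # 2
    print(config.pad_token_id)   # 5

    # Keys removed by the commit (do_sample, num_beams, temperature, ...)
    # are no longer stored in the file; loading falls back to the
    # library-wide PretrainedConfig defaults for them.
    print(config.do_sample, config.num_beams)  # False 1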