system (HF staff) committed
Commit
0e87695
Parent: 8e1990d

Update config.json

Files changed (1):
1. config.json (+6, −8)
config.json CHANGED
@@ -5,22 +5,17 @@
     "ProphetNetForConditionalGeneration"
   ],
   "attention_dropout": 0.1,
-  "ngram": 2,
   "bos_token_id": 2,
-  "hidden_size": 1024,
-  "decoder_attention_heads": 16,
   "decoder_ffn_dim": 4096,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 12,
   "decoder_start_token_id": 2,
   "disable_ngram_loss": false,
   "dropout": 0.1,
-  "encoder_attention_heads": 16,
   "encoder_ffn_dim": 4096,
   "encoder_layerdrop": 0.0,
-  "encoder_layers": 12,
   "eos_token_id": 2,
   "eps": 0.0,
+  "hidden_size": 1024,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -38,7 +33,10 @@
   "model_type": "prophetnet",
   "ngram": 2,
   "num_buckets": 32,
-  "num_hidden_layers": 12,
+  "num_decoder_attention_heads": 16,
+  "num_decoder_layers": 12,
+  "num_encoder_attention_heads": 16,
+  "num_encoder_layers": 12,
   "output_past": false,
   "pad_token_id": 0,
   "prefix": " ",
@@ -52,4 +50,4 @@
     }
   },
   "vocab_size": 250012
- }
+ }
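In short, this commit renames the encoder/decoder size keys to the attribute names the transformers ProphetNet config class uses (num_encoder_layers, num_decoder_attention_heads, and so on), drops the redundant ngram and num_hidden_layers entries, and moves hidden_size into alphabetical order. A minimal sketch of verifying the updated file, assuming a local checkout of this repo and a transformers version that ships ProphetNet (the file path is an assumption, not part of the diff):

    from transformers import ProphetNetConfig

    # Load the updated config.json from a local checkout of this repo
    # (path is an assumption; point it at your own copy).
    config = ProphetNetConfig.from_json_file("config.json")

    # The renamed keys now line up with ProphetNetConfig's attribute names,
    # so the values below come from the file rather than class defaults.
    print(config.num_encoder_layers)           # 12
    print(config.num_decoder_attention_heads)  # 16
    print(config.hidden_size)                  # 1024

Under the old key names, ProphetNetConfig would have silently fallen back to its defaults for these attributes, which is why the rename matters.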