system HF staff committed on
Commit
64d5694
1 Parent(s): ff569ee

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +6 -8
config.json CHANGED
@@ -4,23 +4,18 @@
4
  "architectures": [
5
  "ProphetNetForConditionalGeneration"
6
  ],
7
- "ngram": 2,
8
  "attention_dropout": 0.1,
9
  "bos_token_id": 102,
10
- "hidden_size": 1024,
11
- "decoder_attention_heads": 16,
12
  "decoder_ffn_dim": 4096,
13
  "decoder_layerdrop": 0.0,
14
- "decoder_layers": 12,
15
  "decoder_start_token_id": 102,
16
  "disable_ngram_loss": false,
17
  "dropout": 0.1,
18
- "encoder_attention_heads": 16,
19
  "encoder_ffn_dim": 4096,
20
  "encoder_layerdrop": 0.0,
21
- "encoder_layers": 12,
22
  "eos_token_id": 102,
23
  "eps": 0.0,
 
24
  "id2label": {
25
  "0": "LABEL_0",
26
  "1": "LABEL_1",
@@ -38,7 +33,10 @@
38
  "model_type": "prophetnet",
39
  "ngram": 2,
40
  "num_buckets": 32,
41
- "num_hidden_layers": 12,
 
 
 
42
  "output_past": false,
43
  "pad_token_id": 0,
44
  "prefix": " ",
@@ -52,4 +50,4 @@
52
  }
53
  },
54
  "vocab_size": 30522
55
- }
4
  "architectures": [
5
  "ProphetNetForConditionalGeneration"
6
  ],
 
7
  "attention_dropout": 0.1,
8
  "bos_token_id": 102,
 
 
9
  "decoder_ffn_dim": 4096,
10
  "decoder_layerdrop": 0.0,
 
11
  "decoder_start_token_id": 102,
12
  "disable_ngram_loss": false,
13
  "dropout": 0.1,
 
14
  "encoder_ffn_dim": 4096,
15
  "encoder_layerdrop": 0.0,
 
16
  "eos_token_id": 102,
17
  "eps": 0.0,
18
+ "hidden_size": 1024,
19
  "id2label": {
20
  "0": "LABEL_0",
21
  "1": "LABEL_1",
33
  "model_type": "prophetnet",
34
  "ngram": 2,
35
  "num_buckets": 32,
36
+ "num_decoder_attention_heads": 16,
37
+ "num_decoder_layers": 12,
38
+ "num_encoder_attention_heads": 16,
39
+ "num_encoder_layers": 12,
40
  "output_past": false,
41
  "pad_token_id": 0,
42
  "prefix": " ",
50
  }
51
  },
52
  "vocab_size": 30522
53
+ }