system HF staff committed on
Commit 6c310a9
1 parent: b3582e1

Update config.json

Files changed (1):
  1. config.json +6 -6
config.json CHANGED
@@ -2,7 +2,7 @@
   "activation_dropout": 0.1,
   "activation_function": "gelu",
   "architectures": [
-    "ProphetNetForConditionalGeneration"
+    "XLMProphetNetForConditionalGeneration"
   ],
   "attention_dropout": 0.1,
   "bos_token_id": 2,
@@ -28,9 +28,9 @@
     "LABEL_1": 1,
     "LABEL_2": 2
   },
-  "max_length": 256,
+  "max_length": 512,
   "max_position_embeddings": 512,
-  "model_type": "prophetnet",
+  "model_type": "xlm-prophetnet",
   "ngram": 2,
   "num_buckets": 32,
   "num_decoder_attention_heads": 16,
@@ -44,10 +44,10 @@
   "task_specific_params": {
     "summarization": {
       "early_stopping": true,
-      "length_penalty": 1.0,
+      "length_penalty": 2.0,
       "no_repeat_ngram_size": 3,
-      "num_beams": 10
+      "num_beams": 4
     }
   },
   "vocab_size": 250012
-}
+}
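
For context, a minimal sketch of how the renamed architecture class and the updated summarization defaults would be exercised with transformers. The repo id below is a placeholder assumption (this commit does not name the checkpoint), and the generation arguments simply mirror the new task_specific_params values.

```python
from transformers import XLMProphetNetForConditionalGeneration, XLMProphetNetTokenizer

# Placeholder repo id (assumption) -- replace with the Hub repo this config.json belongs to.
MODEL_ID = "microsoft/xprophetnet-large-wiki100-cased"

tokenizer = XLMProphetNetTokenizer.from_pretrained(MODEL_ID)
model = XLMProphetNetForConditionalGeneration.from_pretrained(MODEL_ID)

article = "Text to summarize goes here."
# max_position_embeddings is 512, so the input is truncated to that length.
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=512)

# Mirror the updated summarization defaults from task_specific_params:
# num_beams=4, length_penalty=2.0, no_repeat_ngram_size=3, early_stopping=True,
# plus the new top-level max_length=512.
summary_ids = model.generate(
    input_ids=inputs["input_ids"],
    attention_mask=inputs["attention_mask"],
    num_beams=4,
    length_penalty=2.0,
    no_repeat_ngram_size=3,
    early_stopping=True,
    max_length=512,
)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```

Note that task_specific_params is read by the summarization pipeline rather than by a bare generate() call, which is why the sketch passes the beam-search settings explicitly.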