Patrick von Platen committed on
Commit
2205864
1 Parent(s): e2ef6e6

correct config

Files changed (1)
config.json  +0 -30
config.json CHANGED
@@ -14,42 +14,12 @@
   "is_encoder_decoder": true,
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
-  "n_positions": 512,
   "num_decoder_layers": 24,
   "num_heads": 32,
   "num_layers": 24,
   "output_past": true,
   "pad_token_id": 0,
   "relative_attention_num_buckets": 32,
-  "task_specific_params": {
-    "summarization": {
-      "early_stopping": true,
-      "length_penalty": 2.0,
-      "max_length": 200,
-      "min_length": 30,
-      "no_repeat_ngram_size": 3,
-      "num_beams": 4,
-      "prefix": "summarize: "
-    },
-    "translation_en_to_de": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to German: "
-    },
-    "translation_en_to_fr": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to French: "
-    },
-    "translation_en_to_ro": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to Romanian: "
-    }
-  },
   "tie_word_embeddings": false,
   "vocab_size": 32128
 }
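
Note (not part of the commit): after this change the config no longer carries "task_specific_params", so generation settings for the summarization and translation tasks have to be supplied at call time instead of being read from config.json. A minimal sketch with the transformers library, assuming the corrected config.json is available as a local file (the path is an assumption for illustration):

from transformers import T5Config

# Load the corrected config from a local file path (hypothetical path).
config = T5Config.from_json_file("config.json")

print(config.model_type)                               # "t5"
print(config.num_layers)                               # 24
print(getattr(config, "task_specific_params", None))   # None after this commit

# Generation defaults formerly stored under "task_specific_params" (e.g. the
# summarization entry) would now be passed explicitly, for example:
#   model.generate(input_ids, num_beams=4, max_length=200, min_length=30,
#                  length_penalty=2.0, no_repeat_ngram_size=3, early_stopping=True)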
 