manuelciosici committed on
Commit 757caeb
1 Parent(s): 9269a80

Remove task-specific parameters

Files changed (1): config.json (+1, -29)
config.json CHANGED
@@ -21,35 +21,7 @@
   "output_past": true,
   "pad_token_id": 0,
   "relative_attention_num_buckets": 32,
-  "task_specific_params": {
-    "summarization": {
-      "early_stopping": true,
-      "length_penalty": 2.0,
-      "max_length": 200,
-      "min_length": 30,
-      "no_repeat_ngram_size": 3,
-      "num_beams": 4,
-      "prefix": "summarize: "
-    },
-    "translation_en_to_de": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to German: "
-    },
-    "translation_en_to_fr": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to French: "
-    },
-    "translation_en_to_ro": {
-      "early_stopping": true,
-      "max_length": 300,
-      "num_beams": 4,
-      "prefix": "translate English to Romanian: "
-    }
-  },
+  "task_specific_params": {},
   "torch_dtype": "float32",
   "transformers_version": "4.15.0",
   "use_cache": true,