pszemraj committed on
Commit
046a3af
1 Parent(s): 835cee3

remove task specific params

Browse files
Files changed (1) hide show
  1. config.json +0 -20
config.json CHANGED
@@ -48,26 +48,6 @@
48
  "num_hidden_layers": 6,
49
  "pad_token_id": 1,
50
  "scale_embedding": false,
51
- "task_specific_params": {
52
- "summarization": {
53
- "length_penalty": 1.0,
54
- "max_length": 128,
55
- "min_length": 12,
56
- "num_beams": 4
57
- },
58
- "summarization_cnn": {
59
- "length_penalty": 2.0,
60
- "max_length": 142,
61
- "min_length": 56,
62
- "num_beams": 4
63
- },
64
- "summarization_xsum": {
65
- "length_penalty": 1.0,
66
- "max_length": 62,
67
- "min_length": 11,
68
- "num_beams": 6
69
- }
70
- },
71
  "torch_dtype": "float32",
72
  "transformers_version": "4.28.1",
73
  "use_cache": true,
 
48
  "num_hidden_layers": 6,
49
  "pad_token_id": 1,
50
  "scale_embedding": false,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  "torch_dtype": "float32",
52
  "transformers_version": "4.28.1",
53
  "use_cache": true,