abdulfatir committed on
Commit
50cc7d4
1 parent: b73f9bd

Remove irrelevant stuff from config.json

Files changed (1): config.json (+0, −30)
config.json CHANGED
@@ -19,39 +19,9 @@
  "num_decoder_layers": 12,
  "num_heads": 12,
  "num_layers": 12,
- "output_past": true,
  "pad_token_id": 0,
  "relative_attention_max_distance": 128,
  "relative_attention_num_buckets": 32,
- "task_specific_params": {
-   "summarization": {
-     "early_stopping": true,
-     "length_penalty": 2.0,
-     "max_length": 200,
-     "min_length": 30,
-     "no_repeat_ngram_size": 3,
-     "num_beams": 4,
-     "prefix": "summarize: "
-   },
-   "translation_en_to_de": {
-     "early_stopping": true,
-     "max_length": 300,
-     "num_beams": 4,
-     "prefix": "translate English to German: "
-   },
-   "translation_en_to_fr": {
-     "early_stopping": true,
-     "max_length": 300,
-     "num_beams": 4,
-     "prefix": "translate English to French: "
-   },
-   "translation_en_to_ro": {
-     "early_stopping": true,
-     "max_length": 300,
-     "num_beams": 4,
-     "prefix": "translate English to Romanian: "
-   }
- },
  "torch_dtype": "bfloat16",
  "transformers_version": "4.31.0",
  "use_cache": true,
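The removed block is the stock T5 `task_specific_params` preset (summarization and translation generation settings inherited from the upstream t5 config), which this checkpoint does not use, hence the cleanup. A minimal sketch of how one might verify the change after pulling this commit, assuming a local checkout at `./model` (the path is a hypothetical placeholder, not the actual repo id):

```python
from transformers import AutoConfig

# Load the config from a local checkout of this repo
# ("./model" is an assumed path for illustration).
config = AutoConfig.from_pretrained("./model")

# After commit 50cc7d4 the T5 generation presets are gone:
# `task_specific_params` falls back to its default of None,
# and "output_past" is no longer present in config.json.
assert config.task_specific_params is None
print(config.torch_dtype)  # bfloat16; unchanged by this commit
```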
 