Upload FlaxWhisperForConditionalGeneration

#19
by Shir02021 - opened
Files changed (2)
  1. config.json +11 -3
  2. generation_config.json +2 -4
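
The title says a Flax checkpoint was uploaded; the diff shown here covers the two config files that were rewritten when the checkpoint was saved with transformers 4.31.0. Once the Flax weights are present in the repo, the model loads directly in JAX/Flax. A minimal sketch, assuming the Hub revision that includes this PR:

```python
from transformers import FlaxWhisperForConditionalGeneration

# Load the Flax checkpoint directly (no from_pt conversion needed once
# the Flax weights are present in the repo).
model = FlaxWhisperForConditionalGeneration.from_pretrained("openai/whisper-tiny")
```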
config.json CHANGED
@@ -2,6 +2,7 @@
   "_name_or_path": "openai/whisper-tiny",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "apply_spec_augment": false,
   "architectures": [
     "WhisperForConditionalGeneration"
   ],
@@ -11,6 +12,7 @@
     50257
   ],
   "bos_token_id": 50257,
+  "classifier_proj_size": 256,
   "d_model": 384,
   "decoder_attention_heads": 6,
   "decoder_ffn_dim": 1536,
@@ -39,9 +41,16 @@
   ],
   "init_std": 0.02,
   "is_encoder_decoder": true,
+  "mask_feature_length": 10,
+  "mask_feature_min_masks": 0,
+  "mask_feature_prob": 0.0,
+  "mask_time_length": 10,
+  "mask_time_min_masks": 2,
+  "mask_time_prob": 0.05,
   "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
+  "median_filter_width": 7,
   "model_type": "whisper",
   "num_hidden_layers": 4,
   "num_mel_bins": 80,
@@ -131,14 +140,13 @@
     49870,
     50254,
     50258,
-    50358,
-    50359,
     50360,
     50361,
     50362
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.27.0.dev0",
+  "transformers_version": "4.31.0",
   "use_cache": true,
+  "use_weighted_layer_sum": false,
   "vocab_size": 51865
 }
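
For context on the new keys: apply_spec_augment and the mask_* fields are the SpecAugment-style masking options that recent transformers releases serialize into WhisperConfig by default, and median_filter_width is used when aligning word-level timestamps. With apply_spec_augment set to false, the masking settings are inert at inference time. A quick way to inspect them, assuming a transformers version that knows these fields (4.31.0 or later, per the diff):

```python
from transformers import WhisperConfig

config = WhisperConfig.from_pretrained("openai/whisper-tiny")
print(config.apply_spec_augment)                        # False -> masking disabled
print(config.mask_time_prob, config.mask_time_length)   # 0.05, 10
print(config.median_filter_width)                       # 7, for word-level timestamps
```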
generation_config.json CHANGED
@@ -207,8 +207,6 @@
     49870,
     50254,
     50258,
-    50358,
-    50359,
     50360,
     50361,
     50362
@@ -217,5 +215,5 @@
     "transcribe": 50359,
     "translate": 50358
   },
-  "transformers_version": "4.27.0.dev0"
-}
+  "transformers_version": "4.31.0"
+}
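
Ids 50358 and 50359 are the <|translate|> and <|transcribe|> task tokens (see the task_to_id mapping retained just above), so removing them from suppress_tokens lets generation emit the requested task token instead of masking it out. A small sketch of the usual way those tokens end up in the decoder prompt; the processor call is standard transformers API, and the repo id shown is the upstream one:

```python
from transformers import WhisperProcessor

processor = WhisperProcessor.from_pretrained("openai/whisper-tiny")

# Builds the decoder prompt <|en|><|transcribe|><|notimestamps|>; the
# transcribe token (50359) must not be suppressed for this to decode.
prompt_ids = processor.get_decoder_prompt_ids(language="en", task="transcribe")
print(prompt_ids)
```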