ArthurZ (HF staff) committed
Commit 1bed27e
Parent: 9dbbab8

Upload config

Files changed (1)
  1. config.json +9 -12
config.json CHANGED
@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "openai/whisper-large",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
-  "architectures": [
-    "WhisperForConditionalGeneration"
-  ],
   "attention_dropout": 0.0,
+  "begin_suppress_tokens": [
+    220,
+    50257
+  ],
   "bos_token_id": 50257,
   "d_model": 1280,
   "decoder_attention_heads": 20,
@@ -19,14 +19,16 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 32,
   "eos_token_id": 50257,
-  "feature_size": 1,
   "init_std": 0.02,
-  "input_channels": 1,
   "is_encoder_decoder": true,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
-  "non_speech_tokens": [
+  "num_hidden_layers": 32,
+  "num_mel_bins": 80,
+  "pad_token_id": 0,
+  "scale_embedding": false,
+  "suppress_tokens": [
     1,
     2,
     6,
@@ -115,11 +117,6 @@
     50360,
     50359
   ],
-  "num_hidden_layers": 32,
-  "num_mel_bins": 80,
-  "pad_token_id": 0,
-  "scale_embedding": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.23.0.dev0",
   "use_cache": true,
   "vocab_size": 51865