Commit a08dc56 by ArthurZ
1 Parent(s): 62a7fcb

Upload WhisperForConditionalGeneration

Files changed (1):
  1. config.json +6 -1
config.json CHANGED
@@ -1,6 +1,10 @@
 {
+  "_name_or_path": "openai/whisper-tiny",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "architectures": [
+    "WhisperForConditionalGeneration"
+  ],
   "attention_dropout": 0.0,
   "bos_token_id": 50257,
   "d_model": 384,
@@ -8,7 +12,7 @@
   "decoder_ffn_dim": 1536,
   "decoder_layerdrop": 0.0,
   "decoder_layers": 4,
-  "decoder_start_token_id": 50257,
+  "decoder_start_token_id": 50258,
   "dropout": 0.0,
   "encoder_attention_heads": 6,
   "encoder_ffn_dim": 1536,
@@ -112,6 +116,7 @@
   "num_mel_bins": 80,
   "pad_token_id": 0,
   "scale_embedding": false,
+  "torch_dtype": "float32",
   "transformers_version": "4.23.0.dev0",
   "use_cache": true,
   "vocab_size": 51865