Commit 1f78528 (1 parent: ca3e273)

Update config.json

config.json CHANGED (+3 -3)
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "openai/whisper-large-v3",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -11,7 +11,7 @@
     220,
     50256
   ],
-  "bos_token_id":
+  "bos_token_id": 50257,
   "classifier_proj_size": 256,
   "d_model": 1280,
   "decoder_attention_heads": 20,
@@ -24,7 +24,7 @@
   "encoder_ffn_dim": 5120,
   "encoder_layerdrop": 0.0,
   "encoder_layers": 32,
-  "eos_token_id":
+  "eos_token_id": 50257,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
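The three fields touched by this commit can be sanity-checked locally; a minimal sketch in Python, assuming a checkout of this repository with the updated config.json in the working directory (the file path is illustrative):

import json

# Load the updated config and confirm the values set in this commit.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["_name_or_path"] == "openai/whisper-large-v3"
assert cfg["bos_token_id"] == 50257
assert cfg["eos_token_id"] == 50257
print("config.json matches the values set in commit 1f78528")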