bofenghuang committed
Commit 9500a7f
1 Parent(s): c716546

Update config

Files changed (1)
  config.json +2 -3
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/gpfswork/rech/cjc/commun/models/whisper/bofenghuang-whisper_large_v3_french_dec4_init",
+  "_name_or_path": "bofenghuang/whisper-large-v3-french-dec4-init",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": true,
@@ -25,7 +25,6 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 32,
   "eos_token_id": 50257,
-  "forced_decoder_ids": null,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
@@ -45,7 +44,7 @@
   "scale_embedding": false,
   "torch_dtype": "float32",
   "transformers_version": "4.36.0.dev0",
-  "use_cache": false,
+  "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51866
 }
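For context, a minimal sketch (not part of the commit) of how the fields touched by this change could be inspected with Transformers. The repository ID below is taken from the new "_name_or_path" value and is an assumption; the repo this config.json actually ships in may differ.

    # Minimal sketch, assuming the checkpoint named in "_name_or_path" is reachable on the Hub.
    from transformers import WhisperConfig

    config = WhisperConfig.from_pretrained("bofenghuang/whisper-large-v3-french-dec4-init")

    # use_cache=True re-enables key/value caching for autoregressive decoding;
    # it is commonly set to False during training (e.g. with gradient checkpointing)
    # and turned back on for inference, as this commit does.
    print(config.use_cache)

    # forced_decoder_ids was dropped from config.json; in recent Transformers versions
    # language/task prompting is expected to live in the generation config instead.
    print(getattr(config, "forced_decoder_ids", None))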