fsicoli committed on
Commit
ddc2ba9
1 Parent(s): 1828686

Upload 9 files

Browse files
Files changed (3) hide show
  1. config.json +4 -5
  2. tokenizer.json +0 -0
  3. vocab.json +0 -0
config.json CHANGED
@@ -2,7 +2,7 @@
2
  "_name_or_path": "openai/whisper-large-v3",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
- "apply_spec_augment": false,
6
  "architectures": [
7
  "WhisperForConditionalGeneration"
8
  ],
@@ -43,10 +43,9 @@
43
  "num_mel_bins": 128,
44
  "pad_token_id": 50256,
45
  "scale_embedding": false,
46
- "suppress_tokens": [],
47
- "torch_dtype": "float32",
48
- "transformers_version": "4.36.0.dev0",
49
- "use_cache": false,
50
  "use_weighted_layer_sum": false,
51
  "vocab_size": 51866
52
  }
 
2
  "_name_or_path": "openai/whisper-large-v3",
3
  "activation_dropout": 0.0,
4
  "activation_function": "gelu",
5
+ "apply_spec_augment": true,
6
  "architectures": [
7
  "WhisperForConditionalGeneration"
8
  ],
 
43
  "num_mel_bins": 128,
44
  "pad_token_id": 50256,
45
  "scale_embedding": false,
46
+ "torch_dtype": "float16",
47
+ "transformers_version": "4.37.0.dev0",
48
+ "use_cache": true,
 
49
  "use_weighted_layer_sum": false,
50
  "vocab_size": 51866
51
  }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
vocab.json CHANGED
The diff for this file is too large to render. See raw diff