bigmorning committed on
Commit 043aaa1
1 Parent(s): e672016

Upload TFWhisperForConditionalGeneration

Files changed (3)
  1. README.md +91 -0
  2. config.json +199 -0
  3. tf_model.h5 +3 -0
README.md ADDED
@@ -0,0 +1,91 @@
+ ---
+ license: apache-2.0
+ tags:
+ - generated_from_keras_callback
+ model-index:
+ - name: whisper_havest_0035
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information Keras had access to. You should
+ probably proofread and complete it, then remove this comment. -->
+
+ # whisper_havest_0035
+
+ This model is a fine-tuned version of [openai/whisper-tiny](https://huggingface.co/openai/whisper-tiny) on an unknown dataset.
+ It achieves the following results at the final training epoch:
+ - Train Loss: 3.5897
+ - Train Accuracy: 0.0150
+ - Train Do Wer: 1.0
+ - Validation Loss: 4.5822
+ - Validation Accuracy: 0.0130
+ - Validation Do Wer: 1.0
+ - Epoch: 34
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - optimizer: {'name': 'AdamWeightDecay', 'learning_rate': 1e-05, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False, 'weight_decay_rate': 0.01}
+ - training_precision: float32
+
+ ### Training results
+
+ | Train Loss | Train Accuracy | Train Do Wer | Validation Loss | Validation Accuracy | Validation Do Wer | Epoch |
+ |:----------:|:--------------:|:------------:|:---------------:|:-------------------:|:-----------------:|:-----:|
+ | 9.9191 | 0.0046 | 1.0 | 8.5836 | 0.0067 | 1.0 | 0 |
+ | 8.0709 | 0.0083 | 1.0 | 7.4667 | 0.0089 | 1.0 | 1 |
+ | 7.1652 | 0.0100 | 1.0 | 6.8204 | 0.0112 | 1.0 | 2 |
+ | 6.7196 | 0.0114 | 1.0 | 6.5192 | 0.0114 | 1.0 | 3 |
+ | 6.4115 | 0.0115 | 1.0 | 6.2357 | 0.0115 | 1.0 | 4 |
+ | 6.1085 | 0.0115 | 1.0 | 5.9657 | 0.0115 | 1.0 | 5 |
+ | 5.8206 | 0.0115 | 1.0 | 5.7162 | 0.0115 | 1.0 | 6 |
+ | 5.5567 | 0.0115 | 1.0 | 5.4963 | 0.0115 | 1.0 | 7 |
+ | 5.3223 | 0.0116 | 1.0 | 5.3096 | 0.0116 | 1.0 | 8 |
+ | 5.1222 | 0.0117 | 1.0 | 5.1600 | 0.0117 | 1.0 | 9 |
+ | 4.9580 | 0.0117 | 1.0 | 5.0391 | 0.0118 | 1.0 | 10 |
+ | 4.8251 | 0.0119 | 1.0 | 4.9427 | 0.0118 | 1.0 | 11 |
+ | 4.7171 | 0.0119 | 1.0 | 4.8691 | 0.0119 | 1.0 | 12 |
+ | 4.6284 | 0.0121 | 1.0 | 4.8123 | 0.0120 | 1.0 | 13 |
+ | 4.5508 | 0.0121 | 1.0 | 4.7620 | 0.0121 | 1.0 | 14 |
+ | 4.4855 | 0.0123 | 1.0 | 4.7260 | 0.0121 | 1.0 | 15 |
+ | 4.4305 | 0.0124 | 1.0 | 4.7018 | 0.0123 | 1.0 | 16 |
+ | 4.3788 | 0.0125 | 1.0 | 4.6738 | 0.0123 | 1.0 | 17 |
+ | 4.3305 | 0.0127 | 1.0 | 4.6525 | 0.0124 | 1.0 | 18 |
+ | 4.2860 | 0.0128 | 1.0 | 4.6401 | 0.0125 | 1.0 | 19 |
+ | 4.2451 | 0.0130 | 1.0 | 4.6234 | 0.0126 | 1.0 | 20 |
+ | 4.1994 | 0.0132 | 1.0 | 4.6077 | 0.0128 | 1.0 | 21 |
+ | 4.1521 | 0.0133 | 1.0 | 4.6098 | 0.0129 | 1.0 | 22 |
+ | 4.1148 | 0.0134 | 1.0 | 4.5919 | 0.0129 | 1.0 | 23 |
+ | 4.0701 | 0.0135 | 1.0 | 4.6038 | 0.0128 | 1.0 | 24 |
+ | 4.0199 | 0.0137 | 1.0 | 4.5777 | 0.0130 | 1.0 | 25 |
+ | 3.9631 | 0.0138 | 1.0 | 4.5734 | 0.0131 | 1.0 | 26 |
+ | 3.9175 | 0.0140 | 1.0 | 4.5866 | 0.0129 | 1.0 | 27 |
+ | 3.8690 | 0.0142 | 1.0 | 4.5900 | 0.0129 | 1.0 | 28 |
+ | 3.8276 | 0.0143 | 1.0 | 4.5602 | 0.0131 | 1.0 | 29 |
+ | 3.7499 | 0.0145 | 1.0 | 4.5619 | 0.0132 | 1.0 | 30 |
+ | 3.6968 | 0.0147 | 1.0 | 4.6203 | 0.0133 | 1.0 | 31 |
+ | 3.6714 | 0.0149 | 1.0 | 4.7075 | 0.0133 | 1.0 | 32 |
+ | 3.6318 | 0.0149 | 1.0 | 4.6638 | 0.0125 | 1.0 | 33 |
+ | 3.5897 | 0.0150 | 1.0 | 4.5822 | 0.0130 | 1.0 | 34 |
+
+
+ ### Framework versions
+
+ - Transformers 4.25.0.dev0
+ - TensorFlow 2.9.2
+ - Datasets 2.6.1
+ - Tokenizers 0.13.2
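
The hyperparameters above record the optimizer as the flat dictionary logged by the Keras callback. As a rough illustration only (not part of this commit), here is a minimal sketch of how that `AdamWeightDecay` configuration could be rebuilt with the classes shipped in Transformers/TensorFlow; the `exclude_from_weight_decay` list and the `compile` scaffolding are assumptions, since neither is recorded in the card.

```python
from transformers import AdamWeightDecay, TFWhisperForConditionalGeneration

# Optimizer rebuilt from the logged hyperparameters:
# {'name': 'AdamWeightDecay', 'learning_rate': 1e-05, 'decay': 0.0,
#  'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False,
#  'weight_decay_rate': 0.01}
optimizer = AdamWeightDecay(
    learning_rate=1e-5,
    beta_1=0.9,
    beta_2=0.999,
    epsilon=1e-7,
    amsgrad=False,
    weight_decay_rate=0.01,
    # Assumption: norm/bias parameters excluded from decay (common practice,
    # not stated in the model card).
    exclude_from_weight_decay=["LayerNorm", "layer_norm", "bias"],
)

# The card names openai/whisper-tiny as the base checkpoint; the fine-tuning
# dataset is unknown, so only the compile step is sketched here.
model = TFWhisperForConditionalGeneration.from_pretrained("openai/whisper-tiny")
model.compile(optimizer=optimizer)  # training would then proceed with model.fit(...)
```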
config.json ADDED
@@ -0,0 +1,199 @@
+ {
+   "_name_or_path": "openai/whisper-tiny",
+   "activation_dropout": 0.0,
+   "activation_function": "gelu",
+   "architectures": [
+     "WhisperForConditionalGeneration"
+   ],
+   "attention_dropout": 0.0,
+   "begin_suppress_tokens": [
+     220,
+     50257
+   ],
+   "bos_token_id": 50257,
+   "d_model": 384,
+   "decoder_attention_heads": 6,
+   "decoder_ffn_dim": 1536,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 4,
+   "decoder_start_token_id": 50258,
+   "dropout": 0.0,
+   "encoder_attention_heads": 6,
+   "encoder_ffn_dim": 1536,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 4,
+   "eos_token_id": 50257,
+   "forced_decoder_ids": [
+     [
+       1,
+       50289
+     ],
+     [
+       2,
+       50359
+     ],
+     [
+       3,
+       50363
+     ]
+   ],
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "max_length": 448,
+   "max_source_positions": 1500,
+   "max_target_positions": 448,
+   "model_type": "whisper",
+   "num_hidden_layers": 4,
+   "num_mel_bins": 80,
+   "pad_token_id": 50257,
+   "scale_embedding": false,
+   "suppress_tokens": [
+     0,
+     1,
+     2,
+     3,
+     4,
+     10968,
+     10969,
+     10970,
+     10971,
+     10972,
+     10973,
+     10974,
+     10975,
+     10976,
+     10977,
+     10978,
+     10979,
+     10980,
+     10981,
+     10982,
+     10983,
+     10984,
+     10985,
+     10986,
+     10987,
+     10988,
+     10989,
+     10990,
+     10991,
+     10992,
+     10993,
+     10994,
+     10995,
+     10996,
+     10997,
+     50256,
+     50257,
+     50258,
+     50259,
+     50260,
+     50261,
+     50262,
+     50263,
+     50264,
+     50265,
+     50266,
+     50267,
+     50268,
+     50269,
+     50270,
+     50271,
+     50272,
+     50273,
+     50274,
+     50275,
+     50276,
+     50277,
+     50278,
+     50279,
+     50280,
+     50281,
+     50282,
+     50283,
+     50284,
+     50285,
+     50286,
+     50287,
+     50288,
+     50289,
+     50290,
+     50291,
+     50292,
+     50293,
+     50294,
+     50295,
+     50296,
+     50297,
+     50298,
+     50299,
+     50300,
+     50301,
+     50302,
+     50303,
+     50304,
+     50305,
+     50306,
+     50307,
+     50308,
+     50309,
+     50310,
+     50311,
+     50312,
+     50313,
+     50314,
+     50315,
+     50316,
+     50317,
+     50318,
+     50319,
+     50320,
+     50321,
+     50322,
+     50323,
+     50324,
+     50325,
+     50326,
+     50327,
+     50328,
+     50329,
+     50330,
+     50331,
+     50332,
+     50333,
+     50334,
+     50335,
+     50336,
+     50337,
+     50338,
+     50339,
+     50340,
+     50341,
+     50342,
+     50343,
+     50344,
+     50345,
+     50346,
+     50347,
+     50348,
+     50349,
+     50350,
+     50351,
+     50352,
+     50353,
+     50354,
+     50355,
+     50356,
+     50357,
+     50358,
+     50359,
+     50360,
+     50361,
+     50362,
+     50363
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.25.0.dev0",
+   "use_cache": true,
+   "vocab_size": 51865
+ }
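
Most of this config is the standard whisper-tiny architecture (4 encoder and 4 decoder layers, d_model 384, 80 mel bins); the fields that matter most at inference time are the generation controls: `forced_decoder_ids` pins decoder positions 1-3 to fixed language/task/no-timestamps tokens, `suppress_tokens` and `begin_suppress_tokens` mask token ids during decoding, and `max_length` caps generation at 448 tokens. A minimal sketch of inspecting these values, assuming the hypothetical repo id `bigmorning/whisper_havest_0035` (pieced together from the committer and model name, not stated in this commit):

```python
from transformers import WhisperConfig

# Repo id is an assumption; see the lead-in above.
config = WhisperConfig.from_pretrained("bigmorning/whisper_havest_0035")

# Decoder positions 1-3 are forced to fixed tokens (language, task,
# no-timestamps) before free decoding starts.
print(config.forced_decoder_ids)     # [[1, 50289], [2, 50359], [3, 50363]]

# Ids whose logits are masked at every step / only at the first step.
print(len(config.suppress_tokens))
print(config.begin_suppress_tokens)  # [220, 50257]

# Hard cap on generated sequence length.
print(config.max_length)             # 448
```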
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2987a94e396c7148908b17a0675182e051d7faee8d9d9802357be4930a6a33ce
+ size 151253984
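
Because the weights are tracked with Git LFS, this commit stores only a pointer file: the LFS spec version, the SHA-256 of the real `tf_model.h5`, and its size in bytes. A minimal sketch of downloading the resolved file and verifying it against the pointer, again assuming the hypothetical repo id `bigmorning/whisper_havest_0035` and the `huggingface_hub` client:

```python
import hashlib
import os

from huggingface_hub import hf_hub_download

# Values copied from the LFS pointer above.
EXPECTED_SHA256 = "2987a94e396c7148908b17a0675182e051d7faee8d9d9802357be4930a6a33ce"
EXPECTED_SIZE = 151253984  # bytes (~151 MB)

# Assumption: the repository id. hf_hub_download resolves the LFS pointer
# to the actual weight file and returns a local cache path.
path = hf_hub_download("bigmorning/whisper_havest_0035", "tf_model.h5")

# Hash the file in 1 MiB chunks and compare against the pointer.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE
assert digest.hexdigest() == EXPECTED_SHA256
print("tf_model.h5 matches the Git LFS pointer")
```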