ek9624 committed on
Commit 044263f
1 Parent(s): 1b1337e

End of training

README.md ADDED
@@ -0,0 +1,56 @@
+ ---
+ language:
+ - sq
+ base_model: openai/whisper-distil-large-v2-ealb
+ tags:
+ - generated_from_trainer
+ datasets:
+ - common_voice_17_0
+ model-index:
+ - name: Whisper Large EALB
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Whisper Large EALB
+
+ This model is a fine-tuned version of [openai/whisper-distil-large-v2-ealb](https://huggingface.co/openai/whisper-distil-large-v2-ealb) on the common_voice_17_0 dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
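+
+ A minimal inference sketch (untested; the repo id below is a placeholder assumption, not a confirmed model id):
+
+ ```python
+ # Hedged sketch: load the fine-tuned checkpoint and transcribe Albanian speech.
+ from transformers import pipeline
+
+ asr = pipeline(
+     "automatic-speech-recognition",
+     model="ek9624/whisper-large-ealb",  # placeholder repo id
+ )
+
+ # language/task mirror the shipped generation_config.json
+ result = asr(
+     "sample.wav",  # path to a local Albanian audio file (example)
+     generate_kwargs={"language": "albanian", "task": "transcribe"},
+ )
+ print(result["text"])
+ ```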
+
+ ## Training and evaluation data
+
+ More information needed
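+
+ As an illustrative sketch only: the card's `common_voice_17_0` tag presumably refers to the Albanian (`sq`) configuration of Common Voice 17.0 on the Hub; the canonical dataset id and access requirements below are assumptions.
+
+ ```python
+ # Hedged sketch: load the Albanian subset of Common Voice 17.0.
+ # The dataset id is assumed to be "mozilla-foundation/common_voice_17_0";
+ # the dataset is gated, so you may need to accept its terms and log in first,
+ # and recent datasets versions may also require trust_remote_code=True.
+ from datasets import load_dataset
+
+ cv_sq = load_dataset("mozilla-foundation/common_voice_17_0", "sq", split="test")
+ print(cv_sq[0]["sentence"])
+ ```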
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 16
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - training_steps: 2634
+ - mixed_precision_training: Native AMP
+
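+ The list above maps onto standard Seq2SeqTrainingArguments; a minimal sketch (the output directory and any warmup/logging settings are assumptions, not taken from this run):
+
+ ```python
+ # Hedged sketch of training arguments matching the hyperparameters listed above.
+ from transformers import Seq2SeqTrainingArguments
+
+ training_args = Seq2SeqTrainingArguments(
+     output_dir="./whisper-large-ealb",  # assumed output path
+     learning_rate=1e-5,
+     per_device_train_batch_size=16,
+     per_device_eval_batch_size=8,
+     seed=42,
+     lr_scheduler_type="linear",
+     max_steps=2634,
+     fp16=True,  # "Native AMP" mixed precision
+     # adam_beta1=0.9, adam_beta2=0.999, adam_epsilon=1e-8 are the defaults
+ )
+ ```
+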
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.41.0
+ - PyTorch 2.2.1
+ - Datasets 2.19.1
+ - Tokenizers 0.19.1
generation_config.json ADDED
@@ -0,0 +1,297 @@
+ {
+ "alignment_heads": [[1, 0], [1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [1, 8], [1, 9], [1, 10], [1, 11], [1, 12], [1, 13], [1, 14], [1, 15], [1, 16], [1, 17], [1, 18], [1, 19]],
+ "begin_suppress_tokens": [220, 50257],
+ "bos_token_id": 50257,
+ "decoder_start_token_id": 50258,
+ "eos_token_id": 50257,
+ "is_multilingual": true,
+ "lang_to_id": {
+ "<|af|>": 50327,
+ "<|am|>": 50334,
+ "<|ar|>": 50272,
+ "<|as|>": 50350,
+ "<|az|>": 50304,
+ "<|ba|>": 50355,
+ "<|be|>": 50330,
+ "<|bg|>": 50292,
+ "<|bn|>": 50302,
+ "<|bo|>": 50347,
+ "<|br|>": 50309,
+ "<|bs|>": 50315,
+ "<|ca|>": 50270,
+ "<|cs|>": 50283,
+ "<|cy|>": 50297,
+ "<|da|>": 50285,
+ "<|de|>": 50261,
+ "<|el|>": 50281,
+ "<|en|>": 50259,
+ "<|es|>": 50262,
+ "<|et|>": 50307,
+ "<|eu|>": 50310,
+ "<|fa|>": 50300,
+ "<|fi|>": 50277,
+ "<|fo|>": 50338,
+ "<|fr|>": 50265,
+ "<|gl|>": 50319,
+ "<|gu|>": 50333,
+ "<|haw|>": 50352,
+ "<|ha|>": 50354,
+ "<|he|>": 50279,
+ "<|hi|>": 50276,
+ "<|hr|>": 50291,
+ "<|ht|>": 50339,
+ "<|hu|>": 50286,
+ "<|hy|>": 50312,
+ "<|id|>": 50275,
+ "<|is|>": 50311,
+ "<|it|>": 50274,
+ "<|ja|>": 50266,
+ "<|jw|>": 50356,
+ "<|ka|>": 50329,
+ "<|kk|>": 50316,
+ "<|km|>": 50323,
+ "<|kn|>": 50306,
+ "<|ko|>": 50264,
+ "<|la|>": 50294,
+ "<|lb|>": 50345,
+ "<|ln|>": 50353,
+ "<|lo|>": 50336,
+ "<|lt|>": 50293,
+ "<|lv|>": 50301,
+ "<|mg|>": 50349,
+ "<|mi|>": 50295,
+ "<|mk|>": 50308,
+ "<|ml|>": 50296,
+ "<|mn|>": 50314,
+ "<|mr|>": 50320,
+ "<|ms|>": 50282,
+ "<|mt|>": 50343,
+ "<|my|>": 50346,
+ "<|ne|>": 50313,
+ "<|nl|>": 50271,
+ "<|nn|>": 50342,
+ "<|no|>": 50288,
+ "<|oc|>": 50328,
+ "<|pa|>": 50321,
+ "<|pl|>": 50269,
+ "<|ps|>": 50340,
+ "<|pt|>": 50267,
+ "<|ro|>": 50284,
+ "<|ru|>": 50263,
+ "<|sa|>": 50344,
+ "<|sd|>": 50332,
+ "<|si|>": 50322,
+ "<|sk|>": 50298,
+ "<|sl|>": 50305,
+ "<|sn|>": 50324,
+ "<|so|>": 50326,
+ "<|sq|>": 50317,
+ "<|sr|>": 50303,
+ "<|su|>": 50357,
+ "<|sv|>": 50273,
+ "<|sw|>": 50318,
+ "<|ta|>": 50287,
+ "<|te|>": 50299,
+ "<|tg|>": 50331,
+ "<|th|>": 50289,
+ "<|tk|>": 50341,
+ "<|tl|>": 50348,
+ "<|tr|>": 50268,
+ "<|tt|>": 50351,
+ "<|uk|>": 50280,
+ "<|ur|>": 50290,
+ "<|uz|>": 50337,
+ "<|vi|>": 50278,
+ "<|yi|>": 50335,
+ "<|yo|>": 50325,
+ "<|yue|>": 50358,
+ "<|zh|>": 50260
+ },
+ "language": "albanian",
+ "max_initial_timestamp_index": 50,
+ "max_length": 448,
+ "no_timestamps_token_id": 50364,
+ "pad_token_id": 50257,
+ "prev_sot_token_id": 50362,
+ "return_timestamps": false,
+ "suppress_tokens": [1, 2, 7, 8, 9, 10, 14, 25, 26, 27, 28, 29, 31, 58, 59, 60, 61, 62, 63, 90, 91, 92, 93,
+ 359, 503, 522, 542, 873, 893, 902, 918, 922, 931, 1350, 1853, 1982, 2460, 2627, 3246, 3253, 3268, 3536, 3846, 3961,
+ 4183, 4667, 6585, 6647, 7273, 9061, 9383, 10428, 10929, 11938, 12033, 12331, 12562, 13793, 14157, 14635, 15265, 15618, 16553, 16604,
+ 18362, 18956, 20075, 21675, 22520, 26130, 26161, 26435, 28279, 29464, 31650, 32302, 32470, 36865, 42863, 47425, 49870, 50254, 50258, 50359, 50360, 50361, 50362, 50363],
+ "task": "transcribe",
+ "task_to_id": {
+ "transcribe": 50360,
+ "translate": 50359
+ },
+ "transformers_version": "4.41.0"
+ }
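
For reference, a minimal sketch (not part of this commit; the repo id is a placeholder assumption) of how the generation settings above are typically consumed at inference time:

```python
# Hedged sketch: inspect the shipped generation settings on the loaded model.
from transformers import WhisperForConditionalGeneration

model_id = "ek9624/whisper-large-ealb"  # placeholder; substitute the actual repo id
model = WhisperForConditionalGeneration.from_pretrained(model_id)

# generation_config.json pins language="albanian" and task="transcribe", so
# model.generate(...) decodes Albanian by default; individual fields can be
# overridden per call, e.g. model.generate(input_features, return_timestamps=True).
print(model.generation_config.language, model.generation_config.task)
```
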
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0f794ab68bee726a76884460417cbd9d6cbf0fa4c5b9eb6adfdba38202a99766
+ oid sha256:ebc01bcdec267c2cc2e0ef0dda7b426121a653a83e84fd738e4c199f15a5cb53
  size 3025686376
runs/May22_15-46-03_f3b785841611/events.out.tfevents.1716392766.f3b785841611.1040.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:862371fb5d78f09bdf0abc46a528c890f94ed1bc29c27bdede35620ab3aa8d92
- size 26711
+ oid sha256:b4f18a8b4cd0174b875d403e44d311608b01a8c65c46f640d86fada4ec8b0bfe
+ size 28120