sarpba committed on
Commit e405128
1 Parent(s): c11c55c

faster whisper

fp16/config.json ADDED
@@ -0,0 +1,223 @@
{
  "alignment_heads": [
    [13, 15], [15, 4], [15, 15], [16, 1], [20, 0], [23, 4]
  ],
  "lang_ids": [
    50259, 50260, 50261, 50262, 50263, 50264, 50265, 50266, 50267, 50268,
    50269, 50270, 50271, 50272, 50273, 50274, 50275, 50276, 50277, 50278,
    50279, 50280, 50281, 50282, 50283, 50284, 50285, 50286, 50287, 50288,
    50289, 50290, 50291, 50292, 50293, 50294, 50295, 50296, 50297, 50298,
    50299, 50300, 50301, 50302, 50303, 50304, 50305, 50306, 50307, 50308,
    50309, 50310, 50311, 50312, 50313, 50314, 50315, 50316, 50317, 50318,
    50319, 50320, 50321, 50322, 50323, 50324, 50325, 50326, 50327, 50328,
    50329, 50330, 50331, 50332, 50333, 50334, 50335, 50336, 50337, 50338,
    50339, 50340, 50341, 50342, 50343, 50344, 50345, 50346, 50347, 50348,
    50349, 50350, 50351, 50352, 50353, 50354, 50355, 50356, 50357
  ],
  "suppress_ids": [
    1, 2, 7, 8, 9, 10, 14, 25, 26, 27, 28, 29,
    31, 58, 59, 60, 61, 62, 63, 90, 91, 92, 93, 359,
    503, 522, 542, 873, 893, 902, 918, 922, 931, 1350, 1853, 1982,
    2460, 2627, 3246, 3253, 3268, 3536, 3846, 3961, 4183, 4667, 6585, 6647,
    7273, 9061, 9383, 10428, 10929, 11938, 12033, 12331, 12562, 13793, 14157, 14635,
    15265, 15618, 16553, 16604, 18362, 18956, 20075, 21675, 22520, 26130, 26161, 26435,
    28279, 29464, 31650, 32302, 32470, 36865, 42863, 47425, 49870, 50254, 50258, 50358,
    50359, 50360, 50361, 50362
  ],
  "suppress_ids_begin": [220, 50257]
}
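
These three lists are the decoding metadata that faster-whisper (CTranslate2) reads from config.json: "alignment_heads" names the cross-attention heads used for word-level timestamp alignment, "lang_ids" enumerates the language token ids (50259-50357), and "suppress_ids"/"suppress_ids_begin" are token ids masked during decoding (always, and at the start of a segment, respectively). A minimal sketch of how these surface in the faster-whisper API; the local path "fp16" and the file "audio.wav" are assumptions, not part of this commit:

from faster_whisper import WhisperModel

# Load the fp16 variant; compute_type should match the directory's precision.
model = WhisperModel("fp16", device="cuda", compute_type="float16")

# word_timestamps=True relies on the "alignment_heads" above; the default
# suppress_tokens=[-1] expands to the "suppress_ids" list from config.json.
segments, info = model.transcribe("audio.wav", word_timestamps=True)
print(info.language, info.language_probability)
for seg in segments:
    print(f"[{seg.start:6.2f} -> {seg.end:6.2f}] {seg.text}")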
fp16/model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5d6946f187607d8216026130f199582b118c697d2e2502660f00a8799ed9a136
size 1527906453
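
The model.bin entries are Git LFS pointers, not the weights themselves; the binaries are fetched by git lfs pull. If in doubt, a downloaded file can be checked against the oid above (illustrative sketch; the path is relative to the repo root):

import hashlib

# Stream the file so multi-GB weights don't need to fit in memory.
h = hashlib.sha256()
with open("fp16/model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
# Should print 5d6946f18760... for the fp16 weights after `git lfs pull`.
print(h.hexdigest())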
fp16/preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
{
  "chunk_length": 30,
  "feature_extractor_type": "WhisperFeatureExtractor",
  "feature_size": 80,
  "hop_length": 160,
  "n_fft": 400,
  "n_samples": 480000,
  "nb_max_frames": 3000,
  "padding_side": "right",
  "padding_value": 0.0,
  "processor_class": "WhisperProcessor",
  "return_attention_mask": false,
  "sampling_rate": 16000
}
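
The preprocessor values are mutually consistent and follow from Whisper's fixed 30-second window at 16 kHz; a quick sanity check (illustrative only, not part of the commit):

chunk_length = 30        # seconds of audio per window
sampling_rate = 16000    # Hz
hop_length = 160         # samples between STFT frames (10 ms)
n_fft = 400              # 25 ms analysis window at 16 kHz

n_samples = chunk_length * sampling_rate
nb_max_frames = n_samples // hop_length

assert n_samples == 480000     # matches "n_samples" above
assert nb_max_frames == 3000   # matches "nb_max_frames" above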
fp16/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
fp16/vocabulary.json ADDED
The diff for this file is too large to render. See raw diff
 
fp16/vocabulary.txt ADDED
The diff for this file is too large to render. See raw diff
 
fp32/config.json ADDED
@@ -0,0 +1,223 @@
Identical in content to fp16/config.json above.
fp32/model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f238ab4e16b64ee271d1fb011edfd5bc6cdaea9e0b8d19b4f3b670b5a7377b15
size 3055769749
fp32/preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
Identical in content to fp16/preprocessor_config.json above.
fp32/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
fp32/vocabulary.json ADDED
The diff for this file is too large to render. See raw diff
 
fp32/vocabulary.txt ADDED
The diff for this file is too large to render. See raw diff
 
int8/config.json ADDED
@@ -0,0 +1,223 @@
Identical in content to fp16/config.json above.
int8/model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:740271de10434ac3011c2a435f4205cf60aa90a56e915401454a22d7bffa8e98
size 784897343
int8/preprocessor_config.json ADDED
@@ -0,0 +1,14 @@
Identical in content to fp16/preprocessor_config.json above.
int8/tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
int8/vocabulary.json ADDED
The diff for this file is too large to render. See raw diff
 
int8/vocabulary.txt ADDED
The diff for this file is too large to render. See raw diff
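
The three directories hold the same model exported at three precisions; judging by the LFS pointers, fp32 is about 3.06 GB, fp16 about 1.53 GB, and int8 about 0.78 GB. A hedged sketch of picking a variant per device (the paths "int8"/"fp16" and "audio.wav" are assumptions based on this repo's layout):

from faster_whisper import WhisperModel

# int8 is the usual choice for CPU inference; fp16 suits CUDA GPUs;
# fp32 is the full-precision reference.
model = WhisperModel("int8", device="cpu", compute_type="int8")
segments, _ = model.transcribe("audio.wav", beam_size=5)
print(" ".join(seg.text.strip() for seg in segments))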