Update config.json
config.json  (+243 −6)  CHANGED
@@ -1,4 +1,241 @@
 {
+  "alignment_heads": [
+    [
+      5,
+      3
+    ],
+    [
+      5,
+      9
+    ],
+    [
+      8,
+      0
+    ],
+    [
+      8,
+      4
+    ],
+    [
+      8,
+      7
+    ],
+    [
+      8,
+      8
+    ],
+    [
+      9,
+      0
+    ],
+    [
+      9,
+      7
+    ],
+    [
+      9,
+      9
+    ],
+    [
+      10,
+      5
+    ]
+  ],
+  "lang_ids": [
+    50259,
+    50260,
+    50261,
+    50262,
+    50263,
+    50264,
+    50265,
+    50266,
+    50267,
+    50268,
+    50269,
+    50270,
+    50271,
+    50272,
+    50273,
+    50274,
+    50275,
+    50276,
+    50277,
+    50278,
+    50279,
+    50280,
+    50281,
+    50282,
+    50283,
+    50284,
+    50285,
+    50286,
+    50287,
+    50288,
+    50289,
+    50290,
+    50291,
+    50292,
+    50293,
+    50294,
+    50295,
+    50296,
+    50297,
+    50298,
+    50299,
+    50300,
+    50301,
+    50302,
+    50303,
+    50304,
+    50305,
+    50306,
+    50307,
+    50308,
+    50309,
+    50310,
+    50311,
+    50312,
+    50313,
+    50314,
+    50315,
+    50316,
+    50317,
+    50318,
+    50319,
+    50320,
+    50321,
+    50322,
+    50323,
+    50324,
+    50325,
+    50326,
+    50327,
+    50328,
+    50329,
+    50330,
+    50331,
+    50332,
+    50333,
+    50334,
+    50335,
+    50336,
+    50337,
+    50338,
+    50339,
+    50340,
+    50341,
+    50342,
+    50343,
+    50344,
+    50345,
+    50346,
+    50347,
+    50348,
+    50349,
+    50350,
+    50351,
+    50352,
+    50353,
+    50354,
+    50355,
+    50356,
+    50357
+  ],
+  "suppress_ids": [
+    1,
+    2,
+    7,
+    8,
+    9,
+    10,
+    14,
+    25,
+    26,
+    27,
+    28,
+    29,
+    31,
+    58,
+    59,
+    60,
+    61,
+    62,
+    63,
+    90,
+    91,
+    92,
+    93,
+    359,
+    503,
+    522,
+    542,
+    873,
+    893,
+    902,
+    918,
+    922,
+    931,
+    1350,
+    1853,
+    1982,
+    2460,
+    2627,
+    3246,
+    3253,
+    3268,
+    3536,
+    3846,
+    3961,
+    4183,
+    4667,
+    6585,
+    6647,
+    7273,
+    9061,
+    9383,
+    10428,
+    10929,
+    11938,
+    12033,
+    12331,
+    12562,
+    13793,
+    14157,
+    14635,
+    15265,
+    15618,
+    16553,
+    16604,
+    18362,
+    18956,
+    20075,
+    21675,
+    22520,
+    26130,
+    26161,
+    26435,
+    28279,
+    29464,
+    31650,
+    32302,
+    32470,
+    36865,
+    42863,
+    47425,
+    49870,
+    50254,
+    50258,
+    50358,
+    50359,
+    50360,
+    50361,
+    50362
+  ],
+  "suppress_ids_begin": [
+    220,
+    50257
+  ],
   "_name_or_path": "./",
   "activation_dropout": 0.1,
   "activation_function": "gelu",
@@ -6,7 +243,7 @@
   "architectures": [
     "WhisperForConditionalGeneration"
   ],
-  "attention_dropout": 0
+  "attention_dropout": 0,
   "begin_suppress_tokens": [
     220,
     50257
@@ -16,13 +253,13 @@
   "d_model": 768,
   "decoder_attention_heads": 12,
   "decoder_ffn_dim": 3072,
-  "decoder_layerdrop": 0
+  "decoder_layerdrop": 0,
   "decoder_layers": 12,
   "decoder_start_token_id": 50258,
-  "dropout": 0
+  "dropout": 0,
   "encoder_attention_heads": 12,
   "encoder_ffn_dim": 3072,
-  "encoder_layerdrop": 0
+  "encoder_layerdrop": 0,
   "encoder_layers": 12,
   "eos_token_id": 50257,
   "forced_decoder_ids": [
@@ -43,7 +280,7 @@
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
   "mask_feature_min_masks": 0,
-  "mask_feature_prob": 0
+  "mask_feature_prob": 0,
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
   "mask_time_prob": 0.05,
@@ -145,7 +382,7 @@
     50362
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.37.1",
   "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51865
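For context, the keys this commit adds ("alignment_heads", "lang_ids", "suppress_ids", "suppress_ids_begin") mirror Whisper's usual token-suppression and timestamp-alignment settings, which recent transformers releases normally keep in generation_config.json rather than config.json. The sketch below is not part of the commit; it is a minimal illustration, assuming a local checkout of this repository that also contains the model weights and processor files, of how the updated config.json could be read and its suppression lists forwarded to generate(). The one-second silent clip is a stand-in since no audio ships with the repo.

import json

import numpy as np
from transformers import WhisperForConditionalGeneration, WhisperProcessor

# Assumption: the working directory is a local checkout of this repository.
with open("config.json") as f:
    cfg = json.load(f)

alignment_heads = cfg["alignment_heads"]        # [layer, head] pairs used for word-level timestamp alignment
suppress_ids = cfg["suppress_ids"]              # token ids suppressed at every decoding step
suppress_ids_begin = cfg["suppress_ids_begin"]  # token ids suppressed only at the first decoding step

model = WhisperForConditionalGeneration.from_pretrained(".")
processor = WhisperProcessor.from_pretrained(".")

# One second of silence as placeholder audio at Whisper's expected 16 kHz rate.
dummy_audio = np.zeros(16000, dtype=np.float32)
inputs = processor(dummy_audio, sampling_rate=16000, return_tensors="pt")

# suppress_tokens / begin_suppress_tokens are standard generation-config fields,
# so they can be passed straight through generate().
generated_ids = model.generate(
    inputs.input_features,
    suppress_tokens=suppress_ids,
    begin_suppress_tokens=suppress_ids_begin,
)
print(processor.batch_decode(generated_ids, skip_special_tokens=True))

The "lang_ids" values (50259 through 50357) are the language token ids; a downstream converter or decoder that wants them, or the alignment heads, would need to read them from this config.json explicitly, since transformers itself does not consume these particular key names from config.json.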