abdouaziiz committed
Commit
6d41b5d
1 Parent(s): df73031

Upload HubertForSequenceClassification

Files changed (2)
  1. config.json +329 -0
  2. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,329 @@
+ {
+ "_name_or_path": "hubert-large-ls960-ft",
+ "activation_dropout": 0.1,
+ "apply_spec_augment": true,
+ "architectures": [
+ "HubertForSequenceClassification"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 1,
+ "classifier_proj_size": 256,
+ "conv_bias": true,
+ "conv_dim": [
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512,
+ 512
+ ],
+ "conv_kernel": [
+ 10,
+ 3,
+ 3,
+ 3,
+ 3,
+ 2,
+ 2
+ ],
+ "conv_stride": [
+ 5,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2,
+ 2
+ ],
+ "ctc_loss_reduction": "sum",
+ "ctc_zero_infinity": false,
+ "diversity_loss_weight": 0.1,
+ "do_stable_layer_norm": true,
+ "eos_token_id": 2,
+ "feat_extract_activation": "gelu",
+ "feat_extract_dropout": 0.0,
+ "feat_extract_norm": "layer",
+ "feat_proj_dropout": 0.1,
+ "feat_proj_layer_norm": true,
+ "final_dropout": 0.1,
+ "finetuning_task": "audio-classification",
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout": 0.1,
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "id2label": {
+ "0": "A canoon",
+ "1": "A cinj",
+ "2": "A keen",
+ "3": "A lanq",
+ "4": "A \u00f1aa\u01b4",
+ "5": "A \u00f1amaak",
+ "6": "Alaa",
+ "7": "Bacaac",
+ "8": "Benn",
+ "9": "B\u00e1lamuk",
+ "10": "B\u00fabaar",
+ "11": "Caggal",
+ "12": "Ceme",
+ "13": "Ci ginnaaw",
+ "14": "Ci kanam",
+ "15": "Ci kow",
+ "16": "Ci suuf",
+ "17": "C\u00e0mmo\u00f1",
+ "18": "Darnde",
+ "19": "Dow",
+ "20": "Doxal",
+ "21": "D\u00e9edet",
+ "22": "Eey",
+ "23": "Esuwa",
+ "24": "Eyen",
+ "25": "E\u00e9",
+ "26": "Fatiya",
+ "27": "Fukk",
+ "28": "Funoom",
+ "29": "Futok",
+ "30": "Futok di sibaakiir",
+ "31": "Futok di sigaba",
+ "32": "Futok di s\u00edfeejir",
+ "33": "Futok di y\u00e1kon",
+ "34": "F\u00e1cul",
+ "35": "Garab",
+ "36": "Goo",
+ "37": "Hani",
+ "38": "Jaay",
+ "39": "Jeegom",
+ "40": "Jeenay",
+ "41": "Jeetati",
+ "42": "Jee\u0257i\u0257i",
+ "43": "Jik",
+ "44": "Jiku",
+ "45": "Joy",
+ "46": "Juni",
+ "47": "Junne",
+ "48": "Juroom",
+ "49": "Juroom-benn",
+ "50": "Juroom-\u00f1aar",
+ "51": "Juroom-\u00f1eent",
+ "52": "Juroom-\u00f1ett",
+ "53": "J\u00ebnd",
+ "54": "Kakamben",
+ "55": "Kamay",
+ "56": "Kanoomen",
+ "57": "K\u00e1kambul",
+ "58": "K\u00e1rir",
+ "59": "Lal",
+ "60": "Lees",
+ "61": "Leng",
+ "62": "Le\u0257ki",
+ "63": "Li",
+ "64": "Mbaamir",
+ "65": "Mbalndi",
+ "66": "Nano",
+ "67": "Naxik",
+ "68": "Nay",
+ "69": "Ndaxar",
+ "70": "Ndeyjoor",
+ "71": "Ndiga",
+ "72": "Ndii\u01ad",
+ "73": "Njong",
+ "74": "O \u0253ox",
+ "75": "Picc",
+ "76": "Rawaandu",
+ "77": "Sappo",
+ "78": "Sibaakiir",
+ "79": "Sigaba",
+ "80": "Solndu",
+ "81": "Soodde",
+ "82": "S\u00edfeejir",
+ "83": "Tadik",
+ "84": "Tati",
+ "85": "Taxawal",
+ "86": "Teemedere",
+ "87": "Teemeed",
+ "88": "Tentaam",
+ "89": "Tik",
+ "90": "Took",
+ "91": "Tus",
+ "92": "T\u00e9emeer",
+ "93": "Ub /T\u00ebj",
+ "94": "Ub/T\u00ebj",
+ "95": "Ubbi /Tijji",
+ "96": "Udditde",
+ "97": "Uddude",
+ "98": "Ujaw",
+ "99": "Ujunere",
+ "100": "Ujuum",
+ "101": "U\u00f1en",
+ "102": "Waafulet",
+ "103": "Waaw",
+ "104": "Weg",
+ "105": "Wet",
+ "106": "W\u00fali",
+ "107": "Xa-aa",
+ "108": "Xaj",
+ "109": "Xar\u0253axay",
+ "110": "Yahdu",
+ "111": "Yeeso",
+ "112": "Yeeyde",
+ "113": "Y\u00e1kon",
+ "114": "\u00d1aamo",
+ "115": "\u00d1aar",
+ "116": "\u00d1eent",
+ "117": "\u00d1ett",
+ "118": "\u018ai\u0257i",
+ "119": "\u01a4etaa-fo-leng",
+ "120": "\u01a4etaa-naxak",
+ "121": "\u01a4etaa-tadak",
+ "122": "\u01a4etaa-\u01adaq",
+ "123": "\u01a4etik"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "label2id": {
+ "A canoon": "0",
+ "A cinj": "1",
+ "A keen": "2",
+ "A lanq": "3",
+ "A \u00f1aa\u01b4": "4",
+ "A \u00f1amaak": "5",
+ "Alaa": "6",
+ "Bacaac": "7",
+ "Benn": "8",
+ "B\u00e1lamuk": "9",
+ "B\u00fabaar": "10",
+ "Caggal": "11",
+ "Ceme": "12",
+ "Ci ginnaaw": "13",
+ "Ci kanam": "14",
+ "Ci kow": "15",
+ "Ci suuf": "16",
+ "C\u00e0mmo\u00f1": "17",
+ "Darnde": "18",
+ "Dow": "19",
+ "Doxal": "20",
+ "D\u00e9edet": "21",
+ "Eey": "22",
+ "Esuwa": "23",
+ "Eyen": "24",
+ "E\u00e9": "25",
+ "Fatiya": "26",
+ "Fukk": "27",
+ "Funoom": "28",
+ "Futok": "29",
+ "Futok di sibaakiir": "30",
+ "Futok di sigaba": "31",
+ "Futok di s\u00edfeejir": "32",
+ "Futok di y\u00e1kon": "33",
+ "F\u00e1cul": "34",
+ "Garab": "35",
+ "Goo": "36",
+ "Hani": "37",
+ "Jaay": "38",
+ "Jeegom": "39",
+ "Jeenay": "40",
+ "Jeetati": "41",
+ "Jee\u0257i\u0257i": "42",
+ "Jik": "43",
+ "Jiku": "44",
+ "Joy": "45",
+ "Juni": "46",
+ "Junne": "47",
+ "Juroom": "48",
+ "Juroom-benn": "49",
+ "Juroom-\u00f1aar": "50",
+ "Juroom-\u00f1eent": "51",
+ "Juroom-\u00f1ett": "52",
+ "J\u00ebnd": "53",
+ "Kakamben": "54",
+ "Kamay": "55",
+ "Kanoomen": "56",
+ "K\u00e1kambul": "57",
+ "K\u00e1rir": "58",
+ "Lal": "59",
+ "Lees": "60",
+ "Leng": "61",
+ "Le\u0257ki": "62",
+ "Li": "63",
+ "Mbaamir": "64",
+ "Mbalndi": "65",
+ "Nano": "66",
+ "Naxik": "67",
+ "Nay": "68",
+ "Ndaxar": "69",
+ "Ndeyjoor": "70",
+ "Ndiga": "71",
+ "Ndii\u01ad": "72",
+ "Njong": "73",
+ "O \u0253ox": "74",
+ "Picc": "75",
+ "Rawaandu": "76",
+ "Sappo": "77",
+ "Sibaakiir": "78",
+ "Sigaba": "79",
+ "Solndu": "80",
+ "Soodde": "81",
+ "S\u00edfeejir": "82",
+ "Tadik": "83",
+ "Tati": "84",
+ "Taxawal": "85",
+ "Teemedere": "86",
+ "Teemeed": "87",
+ "Tentaam": "88",
+ "Tik": "89",
+ "Took": "90",
+ "Tus": "91",
+ "T\u00e9emeer": "92",
+ "Ub /T\u00ebj": "93",
+ "Ub/T\u00ebj": "94",
+ "Ubbi /Tijji": "95",
+ "Udditde": "96",
+ "Uddude": "97",
+ "Ujaw": "98",
+ "Ujunere": "99",
+ "Ujuum": "100",
+ "U\u00f1en": "101",
+ "Waafulet": "102",
+ "Waaw": "103",
+ "Weg": "104",
+ "Wet": "105",
+ "W\u00fali": "106",
+ "Xa-aa": "107",
+ "Xaj": "108",
+ "Xar\u0253axay": "109",
+ "Yahdu": "110",
+ "Yeeso": "111",
+ "Yeeyde": "112",
+ "Y\u00e1kon": "113",
+ "\u00d1aamo": "114",
+ "\u00d1aar": "115",
+ "\u00d1eent": "116",
+ "\u00d1ett": "117",
+ "\u018ai\u0257i": "118",
+ "\u01a4etaa-fo-leng": "119",
+ "\u01a4etaa-naxak": "120",
+ "\u01a4etaa-tadak": "121",
+ "\u01a4etaa-\u01adaq": "122",
+ "\u01a4etik": "123"
+ },
+ "layer_norm_eps": 1e-05,
+ "layerdrop": 0.1,
+ "mask_feature_length": 10,
+ "mask_feature_min_masks": 0,
+ "mask_feature_prob": 0.0,
+ "mask_time_length": 10,
+ "mask_time_min_masks": 2,
+ "mask_time_prob": 0.05,
+ "model_type": "hubert",
+ "num_attention_heads": 16,
+ "num_conv_pos_embedding_groups": 16,
+ "num_conv_pos_embeddings": 128,
+ "num_feat_extract_layers": 7,
+ "num_hidden_layers": 24,
+ "pad_token_id": 0,
+ "torch_dtype": "float32",
+ "transformers_version": "4.27.0.dev0",
+ "use_weighted_layer_sum": false,
+ "vocab_size": 32
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19c393a81823806d84c0398c208462b588cad0a990ceabd76710e6fa15a8175c
+ size 1263069613
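
For reference, a minimal loading sketch for the two files added in this commit (not part of the upload itself). The repo id below is a hypothetical placeholder for this repository's actual Hub path, and the feature extractor is assumed to come from the base checkpoint named in "_name_or_path" (taken here to be facebook/hubert-large-ls960-ft), since this commit only adds config.json and pytorch_model.bin.

import numpy as np
import torch
from transformers import AutoFeatureExtractor, HubertForSequenceClassification

repo_id = "abdouaziiz/<this-repo>"  # hypothetical placeholder; replace with the real repo id

# This commit contains only config.json and the weights, so the feature extractor
# is loaded from the assumed base checkpoint rather than from this repo.
feature_extractor = AutoFeatureExtractor.from_pretrained("facebook/hubert-large-ls960-ft")
model = HubertForSequenceClassification.from_pretrained(repo_id)
model.eval()

# One second of dummy 16 kHz audio stands in for a real utterance.
waveform = np.zeros(16000, dtype=np.float32)
inputs = feature_extractor(waveform, sampling_rate=16000, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = int(logits.argmax(dim=-1))
print(model.config.id2label[predicted_id])

The classification head predicts one of the 124 labels ("0" through "123") listed in the id2label mapping of config.json above.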