Convert dataset to Parquet

#1
by albertvillanova - opened
README.md CHANGED
@@ -1,3 +1,1424 @@
1
  # Disclaimer
2
  This is a tiny subset of the SUPERB dataset, which is intended only for demo purposes!
3
 
 
1
+ ---
2
+ dataset_info:
3
+ - config_name: asr
4
+ features:
5
+ - name: file
6
+ dtype: string
7
+ - name: audio
8
+ dtype:
9
+ audio:
10
+ sampling_rate: 16000
11
+ - name: text
12
+ dtype: string
13
+ - name: speaker_id
14
+ dtype: int64
15
+ - name: chapter_id
16
+ dtype: int64
17
+ - name: id
18
+ dtype: string
19
+ splits:
20
+ - name: test
21
+ num_bytes: 671089.0
22
+ num_examples: 6
23
+ download_size: 632857
24
+ dataset_size: 671089.0
25
+ - config_name: er
26
+ features:
27
+ - name: file
28
+ dtype: string
29
+ - name: audio
30
+ dtype:
31
+ audio:
32
+ sampling_rate: 16000
33
+ - name: label
34
+ dtype:
35
+ class_label:
36
+ names:
37
+ '0': neu
38
+ '1': hap
39
+ '2': ang
40
+ '3': sad
41
+ splits:
42
+ - name: session1
43
+ num_bytes: 1386599.0
44
+ num_examples: 6
45
+ download_size: 1389189
46
+ dataset_size: 1386599.0
47
+ - config_name: ic
48
+ features:
49
+ - name: file
50
+ dtype: string
51
+ - name: audio
52
+ dtype:
53
+ audio:
54
+ sampling_rate: 16000
55
+ - name: speaker_id
56
+ dtype: string
57
+ - name: text
58
+ dtype: string
59
+ - name: action
60
+ dtype:
61
+ class_label:
62
+ names:
63
+ '0': activate
64
+ '1': bring
65
+ '2': change language
66
+ '3': deactivate
67
+ '4': decrease
68
+ '5': increase
69
+ - name: object
70
+ dtype:
71
+ class_label:
72
+ names:
73
+ '0': Chinese
74
+ '1': English
75
+ '2': German
76
+ '3': Korean
77
+ '4': heat
78
+ '5': juice
79
+ '6': lamp
80
+ '7': lights
81
+ '8': music
82
+ '9': newspaper
83
+ '10': none
84
+ '11': shoes
85
+ '12': socks
86
+ '13': volume
87
+ - name: location
88
+ dtype:
89
+ class_label:
90
+ names:
91
+ '0': bedroom
92
+ '1': kitchen
93
+ '2': none
94
+ '3': washroom
95
+ splits:
96
+ - name: test
97
+ num_bytes: 526542.0
98
+ num_examples: 6
99
+ download_size: 466270
100
+ dataset_size: 526542.0
101
+ - config_name: ks
102
+ features:
103
+ - name: file
104
+ dtype: string
105
+ - name: audio
106
+ dtype:
107
+ audio:
108
+ sampling_rate: 16000
109
+ - name: label
110
+ dtype:
111
+ class_label:
112
+ names:
113
+ '0': 'yes'
114
+ '1': 'no'
115
+ '2': up
116
+ '3': down
117
+ '4': left
118
+ '5': right
119
+ '6': 'on'
120
+ '7': 'off'
121
+ '8': stop
122
+ '9': go
123
+ '10': _silence_
124
+ '11': _unknown_
125
+ splits:
126
+ - name: test
127
+ num_bytes: 257722.0
128
+ num_examples: 8
129
+ download_size: 253966
130
+ dataset_size: 257722.0
131
+ - config_name: si
132
+ features:
133
+ - name: file
134
+ dtype: string
135
+ - name: audio
136
+ dtype:
137
+ audio:
138
+ sampling_rate: 16000
139
+ - name: label
140
+ dtype:
141
+ class_label:
142
+ names:
143
+ '0': id10001
144
+ '1': id10002
145
+ '2': id10003
146
+ '3': id10004
147
+ '4': id10005
148
+ '5': id10006
149
+ '6': id10007
150
+ '7': id10008
151
+ '8': id10009
152
+ '9': id10010
153
+ '10': id10011
154
+ '11': id10012
155
+ '12': id10013
156
+ '13': id10014
157
+ '14': id10015
158
+ '15': id10016
159
+ '16': id10017
160
+ '17': id10018
161
+ '18': id10019
162
+ '19': id10020
163
+ '20': id10021
164
+ '21': id10022
165
+ '22': id10023
166
+ '23': id10024
167
+ '24': id10025
168
+ '25': id10026
169
+ '26': id10027
170
+ '27': id10028
171
+ '28': id10029
172
+ '29': id10030
173
+ '30': id10031
174
+ '31': id10032
175
+ '32': id10033
176
+ '33': id10034
177
+ '34': id10035
178
+ '35': id10036
179
+ '36': id10037
180
+ '37': id10038
181
+ '38': id10039
182
+ '39': id10040
183
+ '40': id10041
184
+ '41': id10042
185
+ '42': id10043
186
+ '43': id10044
187
+ '44': id10045
188
+ '45': id10046
189
+ '46': id10047
190
+ '47': id10048
191
+ '48': id10049
192
+ '49': id10050
193
+ '50': id10051
194
+ '51': id10052
195
+ '52': id10053
196
+ '53': id10054
197
+ '54': id10055
198
+ '55': id10056
199
+ '56': id10057
200
+ '57': id10058
201
+ '58': id10059
202
+ '59': id10060
203
+ '60': id10061
204
+ '61': id10062
205
+ '62': id10063
206
+ '63': id10064
207
+ '64': id10065
208
+ '65': id10066
209
+ '66': id10067
210
+ '67': id10068
211
+ '68': id10069
212
+ '69': id10070
213
+ '70': id10071
214
+ '71': id10072
215
+ '72': id10073
216
+ '73': id10074
217
+ '74': id10075
218
+ '75': id10076
219
+ '76': id10077
220
+ '77': id10078
221
+ '78': id10079
222
+ '79': id10080
223
+ '80': id10081
224
+ '81': id10082
225
+ '82': id10083
226
+ '83': id10084
227
+ '84': id10085
228
+ '85': id10086
229
+ '86': id10087
230
+ '87': id10088
231
+ '88': id10089
232
+ '89': id10090
233
+ '90': id10091
234
+ '91': id10092
235
+ '92': id10093
236
+ '93': id10094
237
+ '94': id10095
238
+ '95': id10096
239
+ '96': id10097
240
+ '97': id10098
241
+ '98': id10099
242
+ '99': id10100
243
+ '100': id10101
244
+ '101': id10102
245
+ '102': id10103
246
+ '103': id10104
247
+ '104': id10105
248
+ '105': id10106
249
+ '106': id10107
250
+ '107': id10108
251
+ '108': id10109
252
+ '109': id10110
253
+ '110': id10111
254
+ '111': id10112
255
+ '112': id10113
256
+ '113': id10114
257
+ '114': id10115
258
+ '115': id10116
259
+ '116': id10117
260
+ '117': id10118
261
+ '118': id10119
262
+ '119': id10120
263
+ '120': id10121
264
+ '121': id10122
265
+ '122': id10123
266
+ '123': id10124
267
+ '124': id10125
268
+ '125': id10126
269
+ '126': id10127
270
+ '127': id10128
271
+ '128': id10129
272
+ '129': id10130
273
+ '130': id10131
274
+ '131': id10132
275
+ '132': id10133
276
+ '133': id10134
277
+ '134': id10135
278
+ '135': id10136
279
+ '136': id10137
280
+ '137': id10138
281
+ '138': id10139
282
+ '139': id10140
283
+ '140': id10141
284
+ '141': id10142
285
+ '142': id10143
286
+ '143': id10144
287
+ '144': id10145
288
+ '145': id10146
289
+ '146': id10147
290
+ '147': id10148
291
+ '148': id10149
292
+ '149': id10150
293
+ '150': id10151
294
+ '151': id10152
295
+ '152': id10153
296
+ '153': id10154
297
+ '154': id10155
298
+ '155': id10156
299
+ '156': id10157
300
+ '157': id10158
301
+ '158': id10159
302
+ '159': id10160
303
+ '160': id10161
304
+ '161': id10162
305
+ '162': id10163
306
+ '163': id10164
307
+ '164': id10165
308
+ '165': id10166
309
+ '166': id10167
310
+ '167': id10168
311
+ '168': id10169
312
+ '169': id10170
313
+ '170': id10171
314
+ '171': id10172
315
+ '172': id10173
316
+ '173': id10174
317
+ '174': id10175
318
+ '175': id10176
319
+ '176': id10177
320
+ '177': id10178
321
+ '178': id10179
322
+ '179': id10180
323
+ '180': id10181
324
+ '181': id10182
325
+ '182': id10183
326
+ '183': id10184
327
+ '184': id10185
328
+ '185': id10186
329
+ '186': id10187
330
+ '187': id10188
331
+ '188': id10189
332
+ '189': id10190
333
+ '190': id10191
334
+ '191': id10192
335
+ '192': id10193
336
+ '193': id10194
337
+ '194': id10195
338
+ '195': id10196
339
+ '196': id10197
340
+ '197': id10198
341
+ '198': id10199
342
+ '199': id10200
343
+ '200': id10201
344
+ '201': id10202
345
+ '202': id10203
346
+ '203': id10204
347
+ '204': id10205
348
+ '205': id10206
349
+ '206': id10207
350
+ '207': id10208
351
+ '208': id10209
352
+ '209': id10210
353
+ '210': id10211
354
+ '211': id10212
355
+ '212': id10213
356
+ '213': id10214
357
+ '214': id10215
358
+ '215': id10216
359
+ '216': id10217
360
+ '217': id10218
361
+ '218': id10219
362
+ '219': id10220
363
+ '220': id10221
364
+ '221': id10222
365
+ '222': id10223
366
+ '223': id10224
367
+ '224': id10225
368
+ '225': id10226
369
+ '226': id10227
370
+ '227': id10228
371
+ '228': id10229
372
+ '229': id10230
373
+ '230': id10231
374
+ '231': id10232
375
+ '232': id10233
376
+ '233': id10234
377
+ '234': id10235
378
+ '235': id10236
379
+ '236': id10237
380
+ '237': id10238
381
+ '238': id10239
382
+ '239': id10240
383
+ '240': id10241
384
+ '241': id10242
385
+ '242': id10243
386
+ '243': id10244
387
+ '244': id10245
388
+ '245': id10246
389
+ '246': id10247
390
+ '247': id10248
391
+ '248': id10249
392
+ '249': id10250
393
+ '250': id10251
394
+ '251': id10252
395
+ '252': id10253
396
+ '253': id10254
397
+ '254': id10255
398
+ '255': id10256
399
+ '256': id10257
400
+ '257': id10258
401
+ '258': id10259
402
+ '259': id10260
403
+ '260': id10261
404
+ '261': id10262
405
+ '262': id10263
406
+ '263': id10264
407
+ '264': id10265
408
+ '265': id10266
409
+ '266': id10267
410
+ '267': id10268
411
+ '268': id10269
412
+ '269': id10270
413
+ '270': id10271
414
+ '271': id10272
415
+ '272': id10273
416
+ '273': id10274
417
+ '274': id10275
418
+ '275': id10276
419
+ '276': id10277
420
+ '277': id10278
421
+ '278': id10279
422
+ '279': id10280
423
+ '280': id10281
424
+ '281': id10282
425
+ '282': id10283
426
+ '283': id10284
427
+ '284': id10285
428
+ '285': id10286
429
+ '286': id10287
430
+ '287': id10288
431
+ '288': id10289
432
+ '289': id10290
433
+ '290': id10291
434
+ '291': id10292
435
+ '292': id10293
436
+ '293': id10294
437
+ '294': id10295
438
+ '295': id10296
439
+ '296': id10297
440
+ '297': id10298
441
+ '298': id10299
442
+ '299': id10300
443
+ '300': id10301
444
+ '301': id10302
445
+ '302': id10303
446
+ '303': id10304
447
+ '304': id10305
448
+ '305': id10306
449
+ '306': id10307
450
+ '307': id10308
451
+ '308': id10309
452
+ '309': id10310
453
+ '310': id10311
454
+ '311': id10312
455
+ '312': id10313
456
+ '313': id10314
457
+ '314': id10315
458
+ '315': id10316
459
+ '316': id10317
460
+ '317': id10318
461
+ '318': id10319
462
+ '319': id10320
463
+ '320': id10321
464
+ '321': id10322
465
+ '322': id10323
466
+ '323': id10324
467
+ '324': id10325
468
+ '325': id10326
469
+ '326': id10327
470
+ '327': id10328
471
+ '328': id10329
472
+ '329': id10330
473
+ '330': id10331
474
+ '331': id10332
475
+ '332': id10333
476
+ '333': id10334
477
+ '334': id10335
478
+ '335': id10336
479
+ '336': id10337
480
+ '337': id10338
481
+ '338': id10339
482
+ '339': id10340
483
+ '340': id10341
484
+ '341': id10342
485
+ '342': id10343
486
+ '343': id10344
487
+ '344': id10345
488
+ '345': id10346
489
+ '346': id10347
490
+ '347': id10348
491
+ '348': id10349
492
+ '349': id10350
493
+ '350': id10351
494
+ '351': id10352
495
+ '352': id10353
496
+ '353': id10354
497
+ '354': id10355
498
+ '355': id10356
499
+ '356': id10357
500
+ '357': id10358
501
+ '358': id10359
502
+ '359': id10360
503
+ '360': id10361
504
+ '361': id10362
505
+ '362': id10363
506
+ '363': id10364
507
+ '364': id10365
508
+ '365': id10366
509
+ '366': id10367
510
+ '367': id10368
511
+ '368': id10369
512
+ '369': id10370
513
+ '370': id10371
514
+ '371': id10372
515
+ '372': id10373
516
+ '373': id10374
517
+ '374': id10375
518
+ '375': id10376
519
+ '376': id10377
520
+ '377': id10378
521
+ '378': id10379
522
+ '379': id10380
523
+ '380': id10381
524
+ '381': id10382
525
+ '382': id10383
526
+ '383': id10384
527
+ '384': id10385
528
+ '385': id10386
529
+ '386': id10387
530
+ '387': id10388
531
+ '388': id10389
532
+ '389': id10390
533
+ '390': id10391
534
+ '391': id10392
535
+ '392': id10393
536
+ '393': id10394
537
+ '394': id10395
538
+ '395': id10396
539
+ '396': id10397
540
+ '397': id10398
541
+ '398': id10399
542
+ '399': id10400
543
+ '400': id10401
544
+ '401': id10402
545
+ '402': id10403
546
+ '403': id10404
547
+ '404': id10405
548
+ '405': id10406
549
+ '406': id10407
550
+ '407': id10408
551
+ '408': id10409
552
+ '409': id10410
553
+ '410': id10411
554
+ '411': id10412
555
+ '412': id10413
556
+ '413': id10414
557
+ '414': id10415
558
+ '415': id10416
559
+ '416': id10417
560
+ '417': id10418
561
+ '418': id10419
562
+ '419': id10420
563
+ '420': id10421
564
+ '421': id10422
565
+ '422': id10423
566
+ '423': id10424
567
+ '424': id10425
568
+ '425': id10426
569
+ '426': id10427
570
+ '427': id10428
571
+ '428': id10429
572
+ '429': id10430
573
+ '430': id10431
574
+ '431': id10432
575
+ '432': id10433
576
+ '433': id10434
577
+ '434': id10435
578
+ '435': id10436
579
+ '436': id10437
580
+ '437': id10438
581
+ '438': id10439
582
+ '439': id10440
583
+ '440': id10441
584
+ '441': id10442
585
+ '442': id10443
586
+ '443': id10444
587
+ '444': id10445
588
+ '445': id10446
589
+ '446': id10447
590
+ '447': id10448
591
+ '448': id10449
592
+ '449': id10450
593
+ '450': id10451
594
+ '451': id10452
595
+ '452': id10453
596
+ '453': id10454
597
+ '454': id10455
598
+ '455': id10456
599
+ '456': id10457
600
+ '457': id10458
601
+ '458': id10459
602
+ '459': id10460
603
+ '460': id10461
604
+ '461': id10462
605
+ '462': id10463
606
+ '463': id10464
607
+ '464': id10465
608
+ '465': id10466
609
+ '466': id10467
610
+ '467': id10468
611
+ '468': id10469
612
+ '469': id10470
613
+ '470': id10471
614
+ '471': id10472
615
+ '472': id10473
616
+ '473': id10474
617
+ '474': id10475
618
+ '475': id10476
619
+ '476': id10477
620
+ '477': id10478
621
+ '478': id10479
622
+ '479': id10480
623
+ '480': id10481
624
+ '481': id10482
625
+ '482': id10483
626
+ '483': id10484
627
+ '484': id10485
628
+ '485': id10486
629
+ '486': id10487
630
+ '487': id10488
631
+ '488': id10489
632
+ '489': id10490
633
+ '490': id10491
634
+ '491': id10492
635
+ '492': id10493
636
+ '493': id10494
637
+ '494': id10495
638
+ '495': id10496
639
+ '496': id10497
640
+ '497': id10498
641
+ '498': id10499
642
+ '499': id10500
643
+ '500': id10501
644
+ '501': id10502
645
+ '502': id10503
646
+ '503': id10504
647
+ '504': id10505
648
+ '505': id10506
649
+ '506': id10507
650
+ '507': id10508
651
+ '508': id10509
652
+ '509': id10510
653
+ '510': id10511
654
+ '511': id10512
655
+ '512': id10513
656
+ '513': id10514
657
+ '514': id10515
658
+ '515': id10516
659
+ '516': id10517
660
+ '517': id10518
661
+ '518': id10519
662
+ '519': id10520
663
+ '520': id10521
664
+ '521': id10522
665
+ '522': id10523
666
+ '523': id10524
667
+ '524': id10525
668
+ '525': id10526
669
+ '526': id10527
670
+ '527': id10528
671
+ '528': id10529
672
+ '529': id10530
673
+ '530': id10531
674
+ '531': id10532
675
+ '532': id10533
676
+ '533': id10534
677
+ '534': id10535
678
+ '535': id10536
679
+ '536': id10537
680
+ '537': id10538
681
+ '538': id10539
682
+ '539': id10540
683
+ '540': id10541
684
+ '541': id10542
685
+ '542': id10543
686
+ '543': id10544
687
+ '544': id10545
688
+ '545': id10546
689
+ '546': id10547
690
+ '547': id10548
691
+ '548': id10549
692
+ '549': id10550
693
+ '550': id10551
694
+ '551': id10552
695
+ '552': id10553
696
+ '553': id10554
697
+ '554': id10555
698
+ '555': id10556
699
+ '556': id10557
700
+ '557': id10558
701
+ '558': id10559
702
+ '559': id10560
703
+ '560': id10561
704
+ '561': id10562
705
+ '562': id10563
706
+ '563': id10564
707
+ '564': id10565
708
+ '565': id10566
709
+ '566': id10567
710
+ '567': id10568
711
+ '568': id10569
712
+ '569': id10570
713
+ '570': id10571
714
+ '571': id10572
715
+ '572': id10573
716
+ '573': id10574
717
+ '574': id10575
718
+ '575': id10576
719
+ '576': id10577
720
+ '577': id10578
721
+ '578': id10579
722
+ '579': id10580
723
+ '580': id10581
724
+ '581': id10582
725
+ '582': id10583
726
+ '583': id10584
727
+ '584': id10585
728
+ '585': id10586
729
+ '586': id10587
730
+ '587': id10588
731
+ '588': id10589
732
+ '589': id10590
733
+ '590': id10591
734
+ '591': id10592
735
+ '592': id10593
736
+ '593': id10594
737
+ '594': id10595
738
+ '595': id10596
739
+ '596': id10597
740
+ '597': id10598
741
+ '598': id10599
742
+ '599': id10600
743
+ '600': id10601
744
+ '601': id10602
745
+ '602': id10603
746
+ '603': id10604
747
+ '604': id10605
748
+ '605': id10606
749
+ '606': id10607
750
+ '607': id10608
751
+ '608': id10609
752
+ '609': id10610
753
+ '610': id10611
754
+ '611': id10612
755
+ '612': id10613
756
+ '613': id10614
757
+ '614': id10615
758
+ '615': id10616
759
+ '616': id10617
760
+ '617': id10618
761
+ '618': id10619
762
+ '619': id10620
763
+ '620': id10621
764
+ '621': id10622
765
+ '622': id10623
766
+ '623': id10624
767
+ '624': id10625
768
+ '625': id10626
769
+ '626': id10627
770
+ '627': id10628
771
+ '628': id10629
772
+ '629': id10630
773
+ '630': id10631
774
+ '631': id10632
775
+ '632': id10633
776
+ '633': id10634
777
+ '634': id10635
778
+ '635': id10636
779
+ '636': id10637
780
+ '637': id10638
781
+ '638': id10639
782
+ '639': id10640
783
+ '640': id10641
784
+ '641': id10642
785
+ '642': id10643
786
+ '643': id10644
787
+ '644': id10645
788
+ '645': id10646
789
+ '646': id10647
790
+ '647': id10648
791
+ '648': id10649
792
+ '649': id10650
793
+ '650': id10651
794
+ '651': id10652
795
+ '652': id10653
796
+ '653': id10654
797
+ '654': id10655
798
+ '655': id10656
799
+ '656': id10657
800
+ '657': id10658
801
+ '658': id10659
802
+ '659': id10660
803
+ '660': id10661
804
+ '661': id10662
805
+ '662': id10663
806
+ '663': id10664
807
+ '664': id10665
808
+ '665': id10666
809
+ '666': id10667
810
+ '667': id10668
811
+ '668': id10669
812
+ '669': id10670
813
+ '670': id10671
814
+ '671': id10672
815
+ '672': id10673
816
+ '673': id10674
817
+ '674': id10675
818
+ '675': id10676
819
+ '676': id10677
820
+ '677': id10678
821
+ '678': id10679
822
+ '679': id10680
823
+ '680': id10681
824
+ '681': id10682
825
+ '682': id10683
826
+ '683': id10684
827
+ '684': id10685
828
+ '685': id10686
829
+ '686': id10687
830
+ '687': id10688
831
+ '688': id10689
832
+ '689': id10690
833
+ '690': id10691
834
+ '691': id10692
835
+ '692': id10693
836
+ '693': id10694
837
+ '694': id10695
838
+ '695': id10696
839
+ '696': id10697
840
+ '697': id10698
841
+ '698': id10699
842
+ '699': id10700
843
+ '700': id10701
844
+ '701': id10702
845
+ '702': id10703
846
+ '703': id10704
847
+ '704': id10705
848
+ '705': id10706
849
+ '706': id10707
850
+ '707': id10708
851
+ '708': id10709
852
+ '709': id10710
853
+ '710': id10711
854
+ '711': id10712
855
+ '712': id10713
856
+ '713': id10714
857
+ '714': id10715
858
+ '715': id10716
859
+ '716': id10717
860
+ '717': id10718
861
+ '718': id10719
862
+ '719': id10720
863
+ '720': id10721
864
+ '721': id10722
865
+ '722': id10723
866
+ '723': id10724
867
+ '724': id10725
868
+ '725': id10726
869
+ '726': id10727
870
+ '727': id10728
871
+ '728': id10729
872
+ '729': id10730
873
+ '730': id10731
874
+ '731': id10732
875
+ '732': id10733
876
+ '733': id10734
877
+ '734': id10735
878
+ '735': id10736
879
+ '736': id10737
880
+ '737': id10738
881
+ '738': id10739
882
+ '739': id10740
883
+ '740': id10741
884
+ '741': id10742
885
+ '742': id10743
886
+ '743': id10744
887
+ '744': id10745
888
+ '745': id10746
889
+ '746': id10747
890
+ '747': id10748
891
+ '748': id10749
892
+ '749': id10750
893
+ '750': id10751
894
+ '751': id10752
895
+ '752': id10753
896
+ '753': id10754
897
+ '754': id10755
898
+ '755': id10756
899
+ '756': id10757
900
+ '757': id10758
901
+ '758': id10759
902
+ '759': id10760
903
+ '760': id10761
904
+ '761': id10762
905
+ '762': id10763
906
+ '763': id10764
907
+ '764': id10765
908
+ '765': id10766
909
+ '766': id10767
910
+ '767': id10768
911
+ '768': id10769
912
+ '769': id10770
913
+ '770': id10771
914
+ '771': id10772
915
+ '772': id10773
916
+ '773': id10774
917
+ '774': id10775
918
+ '775': id10776
919
+ '776': id10777
920
+ '777': id10778
921
+ '778': id10779
922
+ '779': id10780
923
+ '780': id10781
924
+ '781': id10782
925
+ '782': id10783
926
+ '783': id10784
927
+ '784': id10785
928
+ '785': id10786
929
+ '786': id10787
930
+ '787': id10788
931
+ '788': id10789
932
+ '789': id10790
933
+ '790': id10791
934
+ '791': id10792
935
+ '792': id10793
936
+ '793': id10794
937
+ '794': id10795
938
+ '795': id10796
939
+ '796': id10797
940
+ '797': id10798
941
+ '798': id10799
942
+ '799': id10800
943
+ '800': id10801
944
+ '801': id10802
945
+ '802': id10803
946
+ '803': id10804
947
+ '804': id10805
948
+ '805': id10806
949
+ '806': id10807
950
+ '807': id10808
951
+ '808': id10809
952
+ '809': id10810
953
+ '810': id10811
954
+ '811': id10812
955
+ '812': id10813
956
+ '813': id10814
957
+ '814': id10815
958
+ '815': id10816
959
+ '816': id10817
960
+ '817': id10818
961
+ '818': id10819
962
+ '819': id10820
963
+ '820': id10821
964
+ '821': id10822
965
+ '822': id10823
966
+ '823': id10824
967
+ '824': id10825
968
+ '825': id10826
969
+ '826': id10827
970
+ '827': id10828
971
+ '828': id10829
972
+ '829': id10830
973
+ '830': id10831
974
+ '831': id10832
975
+ '832': id10833
976
+ '833': id10834
977
+ '834': id10835
978
+ '835': id10836
979
+ '836': id10837
980
+ '837': id10838
981
+ '838': id10839
982
+ '839': id10840
983
+ '840': id10841
984
+ '841': id10842
985
+ '842': id10843
986
+ '843': id10844
987
+ '844': id10845
988
+ '845': id10846
989
+ '846': id10847
990
+ '847': id10848
991
+ '848': id10849
992
+ '849': id10850
993
+ '850': id10851
994
+ '851': id10852
995
+ '852': id10853
996
+ '853': id10854
997
+ '854': id10855
998
+ '855': id10856
999
+ '856': id10857
1000
+ '857': id10858
1001
+ '858': id10859
1002
+ '859': id10860
1003
+ '860': id10861
1004
+ '861': id10862
1005
+ '862': id10863
1006
+ '863': id10864
1007
+ '864': id10865
1008
+ '865': id10866
1009
+ '866': id10867
1010
+ '867': id10868
1011
+ '868': id10869
1012
+ '869': id10870
1013
+ '870': id10871
1014
+ '871': id10872
1015
+ '872': id10873
1016
+ '873': id10874
1017
+ '874': id10875
1018
+ '875': id10876
1019
+ '876': id10877
1020
+ '877': id10878
1021
+ '878': id10879
1022
+ '879': id10880
1023
+ '880': id10881
1024
+ '881': id10882
1025
+ '882': id10883
1026
+ '883': id10884
1027
+ '884': id10885
1028
+ '885': id10886
1029
+ '886': id10887
1030
+ '887': id10888
1031
+ '888': id10889
1032
+ '889': id10890
1033
+ '890': id10891
1034
+ '891': id10892
1035
+ '892': id10893
1036
+ '893': id10894
1037
+ '894': id10895
1038
+ '895': id10896
1039
+ '896': id10897
1040
+ '897': id10898
1041
+ '898': id10899
1042
+ '899': id10900
1043
+ '900': id10901
1044
+ '901': id10902
1045
+ '902': id10903
1046
+ '903': id10904
1047
+ '904': id10905
1048
+ '905': id10906
1049
+ '906': id10907
1050
+ '907': id10908
1051
+ '908': id10909
1052
+ '909': id10910
1053
+ '910': id10911
1054
+ '911': id10912
1055
+ '912': id10913
1056
+ '913': id10914
1057
+ '914': id10915
1058
+ '915': id10916
1059
+ '916': id10917
1060
+ '917': id10918
1061
+ '918': id10919
1062
+ '919': id10920
1063
+ '920': id10921
1064
+ '921': id10922
1065
+ '922': id10923
1066
+ '923': id10924
1067
+ '924': id10925
1068
+ '925': id10926
1069
+ '926': id10927
1070
+ '927': id10928
1071
+ '928': id10929
1072
+ '929': id10930
1073
+ '930': id10931
1074
+ '931': id10932
1075
+ '932': id10933
1076
+ '933': id10934
1077
+ '934': id10935
1078
+ '935': id10936
1079
+ '936': id10937
1080
+ '937': id10938
1081
+ '938': id10939
1082
+ '939': id10940
1083
+ '940': id10941
1084
+ '941': id10942
1085
+ '942': id10943
1086
+ '943': id10944
1087
+ '944': id10945
1088
+ '945': id10946
1089
+ '946': id10947
1090
+ '947': id10948
1091
+ '948': id10949
1092
+ '949': id10950
1093
+ '950': id10951
1094
+ '951': id10952
1095
+ '952': id10953
1096
+ '953': id10954
1097
+ '954': id10955
1098
+ '955': id10956
1099
+ '956': id10957
1100
+ '957': id10958
1101
+ '958': id10959
1102
+ '959': id10960
1103
+ '960': id10961
1104
+ '961': id10962
1105
+ '962': id10963
1106
+ '963': id10964
1107
+ '964': id10965
1108
+ '965': id10966
1109
+ '966': id10967
1110
+ '967': id10968
1111
+ '968': id10969
1112
+ '969': id10970
1113
+ '970': id10971
1114
+ '971': id10972
1115
+ '972': id10973
1116
+ '973': id10974
1117
+ '974': id10975
1118
+ '975': id10976
1119
+ '976': id10977
1120
+ '977': id10978
1121
+ '978': id10979
1122
+ '979': id10980
1123
+ '980': id10981
1124
+ '981': id10982
1125
+ '982': id10983
1126
+ '983': id10984
1127
+ '984': id10985
1128
+ '985': id10986
1129
+ '986': id10987
1130
+ '987': id10988
1131
+ '988': id10989
1132
+ '989': id10990
1133
+ '990': id10991
1134
+ '991': id10992
1135
+ '992': id10993
1136
+ '993': id10994
1137
+ '994': id10995
1138
+ '995': id10996
1139
+ '996': id10997
1140
+ '997': id10998
1141
+ '998': id10999
1142
+ '999': id11000
1143
+ '1000': id11001
1144
+ '1001': id11002
1145
+ '1002': id11003
1146
+ '1003': id11004
1147
+ '1004': id11005
1148
+ '1005': id11006
1149
+ '1006': id11007
1150
+ '1007': id11008
1151
+ '1008': id11009
1152
+ '1009': id11010
1153
+ '1010': id11011
1154
+ '1011': id11012
1155
+ '1012': id11013
1156
+ '1013': id11014
1157
+ '1014': id11015
1158
+ '1015': id11016
1159
+ '1016': id11017
1160
+ '1017': id11018
1161
+ '1018': id11019
1162
+ '1019': id11020
1163
+ '1020': id11021
1164
+ '1021': id11022
1165
+ '1022': id11023
1166
+ '1023': id11024
1167
+ '1024': id11025
1168
+ '1025': id11026
1169
+ '1026': id11027
1170
+ '1027': id11028
1171
+ '1028': id11029
1172
+ '1029': id11030
1173
+ '1030': id11031
1174
+ '1031': id11032
1175
+ '1032': id11033
1176
+ '1033': id11034
1177
+ '1034': id11035
1178
+ '1035': id11036
1179
+ '1036': id11037
1180
+ '1037': id11038
1181
+ '1038': id11039
1182
+ '1039': id11040
1183
+ '1040': id11041
1184
+ '1041': id11042
1185
+ '1042': id11043
1186
+ '1043': id11044
1187
+ '1044': id11045
1188
+ '1045': id11046
1189
+ '1046': id11047
1190
+ '1047': id11048
1191
+ '1048': id11049
1192
+ '1049': id11050
1193
+ '1050': id11051
1194
+ '1051': id11052
1195
+ '1052': id11053
1196
+ '1053': id11054
1197
+ '1054': id11055
1198
+ '1055': id11056
1199
+ '1056': id11057
1200
+ '1057': id11058
1201
+ '1058': id11059
1202
+ '1059': id11060
1203
+ '1060': id11061
1204
+ '1061': id11062
1205
+ '1062': id11063
1206
+ '1063': id11064
1207
+ '1064': id11065
1208
+ '1065': id11066
1209
+ '1066': id11067
1210
+ '1067': id11068
1211
+ '1068': id11069
1212
+ '1069': id11070
1213
+ '1070': id11071
1214
+ '1071': id11072
1215
+ '1072': id11073
1216
+ '1073': id11074
1217
+ '1074': id11075
1218
+ '1075': id11076
1219
+ '1076': id11077
1220
+ '1077': id11078
1221
+ '1078': id11079
1222
+ '1079': id11080
1223
+ '1080': id11081
1224
+ '1081': id11082
1225
+ '1082': id11083
1226
+ '1083': id11084
1227
+ '1084': id11085
1228
+ '1085': id11086
1229
+ '1086': id11087
1230
+ '1087': id11088
1231
+ '1088': id11089
1232
+ '1089': id11090
1233
+ '1090': id11091
1234
+ '1091': id11092
1235
+ '1092': id11093
1236
+ '1093': id11094
1237
+ '1094': id11095
1238
+ '1095': id11096
1239
+ '1096': id11097
1240
+ '1097': id11098
1241
+ '1098': id11099
1242
+ '1099': id11100
1243
+ '1100': id11101
1244
+ '1101': id11102
1245
+ '1102': id11103
1246
+ '1103': id11104
1247
+ '1104': id11105
1248
+ '1105': id11106
1249
+ '1106': id11107
1250
+ '1107': id11108
1251
+ '1108': id11109
1252
+ '1109': id11110
1253
+ '1110': id11111
1254
+ '1111': id11112
1255
+ '1112': id11113
1256
+ '1113': id11114
1257
+ '1114': id11115
1258
+ '1115': id11116
1259
+ '1116': id11117
1260
+ '1117': id11118
1261
+ '1118': id11119
1262
+ '1119': id11120
1263
+ '1120': id11121
1264
+ '1121': id11122
1265
+ '1122': id11123
1266
+ '1123': id11124
1267
+ '1124': id11125
1268
+ '1125': id11126
1269
+ '1126': id11127
1270
+ '1127': id11128
1271
+ '1128': id11129
1272
+ '1129': id11130
1273
+ '1130': id11131
1274
+ '1131': id11132
1275
+ '1132': id11133
1276
+ '1133': id11134
1277
+ '1134': id11135
1278
+ '1135': id11136
1279
+ '1136': id11137
1280
+ '1137': id11138
1281
+ '1138': id11139
1282
+ '1139': id11140
1283
+ '1140': id11141
1284
+ '1141': id11142
1285
+ '1142': id11143
1286
+ '1143': id11144
1287
+ '1144': id11145
1288
+ '1145': id11146
1289
+ '1146': id11147
1290
+ '1147': id11148
1291
+ '1148': id11149
1292
+ '1149': id11150
1293
+ '1150': id11151
1294
+ '1151': id11152
1295
+ '1152': id11153
1296
+ '1153': id11154
1297
+ '1154': id11155
1298
+ '1155': id11156
1299
+ '1156': id11157
1300
+ '1157': id11158
1301
+ '1158': id11159
1302
+ '1159': id11160
1303
+ '1160': id11161
1304
+ '1161': id11162
1305
+ '1162': id11163
1306
+ '1163': id11164
1307
+ '1164': id11165
1308
+ '1165': id11166
1309
+ '1166': id11167
1310
+ '1167': id11168
1311
+ '1168': id11169
1312
+ '1169': id11170
1313
+ '1170': id11171
1314
+ '1171': id11172
1315
+ '1172': id11173
1316
+ '1173': id11174
1317
+ '1174': id11175
1318
+ '1175': id11176
1319
+ '1176': id11177
1320
+ '1177': id11178
1321
+ '1178': id11179
1322
+ '1179': id11180
1323
+ '1180': id11181
1324
+ '1181': id11182
1325
+ '1182': id11183
1326
+ '1183': id11184
1327
+ '1184': id11185
1328
+ '1185': id11186
1329
+ '1186': id11187
1330
+ '1187': id11188
1331
+ '1188': id11189
1332
+ '1189': id11190
1333
+ '1190': id11191
1334
+ '1191': id11192
1335
+ '1192': id11193
1336
+ '1193': id11194
1337
+ '1194': id11195
1338
+ '1195': id11196
1339
+ '1196': id11197
1340
+ '1197': id11198
1341
+ '1198': id11199
1342
+ '1199': id11200
1343
+ '1200': id11201
1344
+ '1201': id11202
1345
+ '1202': id11203
1346
+ '1203': id11204
1347
+ '1204': id11205
1348
+ '1205': id11206
1349
+ '1206': id11207
1350
+ '1207': id11208
1351
+ '1208': id11209
1352
+ '1209': id11210
1353
+ '1210': id11211
1354
+ '1211': id11212
1355
+ '1212': id11213
1356
+ '1213': id11214
1357
+ '1214': id11215
1358
+ '1215': id11216
1359
+ '1216': id11217
1360
+ '1217': id11218
1361
+ '1218': id11219
1362
+ '1219': id11220
1363
+ '1220': id11221
1364
+ '1221': id11222
1365
+ '1222': id11223
1366
+ '1223': id11224
1367
+ '1224': id11225
1368
+ '1225': id11226
1369
+ '1226': id11227
1370
+ '1227': id11228
1371
+ '1228': id11229
1372
+ '1229': id11230
1373
+ '1230': id11231
1374
+ '1231': id11232
1375
+ '1232': id11233
1376
+ '1233': id11234
1377
+ '1234': id11235
1378
+ '1235': id11236
1379
+ '1236': id11237
1380
+ '1237': id11238
1381
+ '1238': id11239
1382
+ '1239': id11240
1383
+ '1240': id11241
1384
+ '1241': id11242
1385
+ '1242': id11243
1386
+ '1243': id11244
1387
+ '1244': id11245
1388
+ '1245': id11246
1389
+ '1246': id11247
1390
+ '1247': id11248
1391
+ '1248': id11249
1392
+ '1249': id11250
1393
+ '1250': id11251
1394
+ splits:
1395
+ - name: test
1396
+ num_bytes: 1798447.0
1397
+ num_examples: 6
1398
+ download_size: 1832975
1399
+ dataset_size: 1798447.0
1400
+ configs:
1401
+ - config_name: asr
1402
+ data_files:
1403
+ - split: test
1404
+ path: asr/test-*
1405
+ - config_name: er
1406
+ data_files:
1407
+ - split: session1
1408
+ path: er/session1-*
1409
+ - config_name: ic
1410
+ data_files:
1411
+ - split: test
1412
+ path: ic/test-*
1413
+ - config_name: ks
1414
+ data_files:
1415
+ - split: test
1416
+ path: ks/test-*
1417
+ - config_name: si
1418
+ data_files:
1419
+ - split: test
1420
+ path: si/test-*
1421
+ ---
1422
  # Disclaimer
1423
  This is a tiny subset of the SUPERB dataset, which is intended only for demo purposes!
1424
 
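With the Parquet conversion in place, the configs declared in the metadata above (`asr`, `er`, `ic`, `ks`, `si`) can be loaded directly from the Hub's Parquet files, without the loading script removed further down in this diff. Below is a minimal sketch using the `datasets` library; the repository id is a placeholder assumption, since the actual Hub id of this dataset is not shown in the diff.

```python
# Minimal sketch: loading one of the Parquet-backed configs with `datasets`.
from datasets import load_dataset

REPO_ID = "user/superb_demo"  # placeholder: replace with the actual Hub id of this dataset

# "asr" is one of the configs declared in the README metadata above;
# its only split is "test" (the "er" config instead exposes "session1").
asr = load_dataset(REPO_ID, "asr", split="test")

print(asr)              # features: file, audio, text, speaker_id, chapter_id, id
print(asr[0]["text"])   # transcript of the first example
```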
data/speech_commands_test_set_v0.01.zip → asr/test-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:026ed5d467c50a07dec3ada87a9d833ba9d21cd7367721d3dcb08a28482d4c06
3
- size 211385
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c62f287a74c9e2d8b8eafecc5b289be62ea63ff5fb0e274f6ced9b10d08244db
3
+ size 632857
data/IEMOCAP_full_release.zip → er/session1-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:99da5de585066b100f2a16b8960a350a6620fa2487f1127e969198f7d7f9bcba
3
- size 1209515
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7da39fd893144fae1d40d55cc0cba4441ccfac64db7b3990d5e7dbbc63787cc
3
+ size 1389189
data/LibriSpeech-test-clean.zip → ic/test-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a59633668a54ac2fbd99283afa291be3c3db130a1cf36687e18d8876db9f2df1
3
- size 626257
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a026dd37e154c3965d1feab04ff612c094991ce23f0dd330fb756dbf58713567
3
+ size 466270
data/fluent_speech_commands_dataset.zip → ks/test-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9d0ef6e970baffb1b6ca6f370c07240bdcd0dd32b1436426fa025019c00d9894
3
- size 494518
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:57daf5ffe13c52b6c42962a8d24f83926d96ca7bfe950291a85abe383eed41e5
3
+ size 253966
data/VoxCeleb1.zip → si/test-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:539e1de8d0158ab7cb7fbb7dd793bab99bada17319038e31bccfbed16c9b2219
3
- size 1512582
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f7ee6b215237e9f59be863d36fbeb2690dfbfda3f33c994fd38e1f4a45086b3
3
+ size 1832975
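Each original archive is replaced by a single Parquet shard per split (for example `ks/test-00000-of-00001.parquet`, as shown in the renames above), so a split can also be read directly with `pyarrow`. A minimal sketch under the same placeholder repository id assumption:

```python
# Minimal sketch: reading one Parquet shard directly.
import pyarrow.parquet as pq
from huggingface_hub import hf_hub_download

REPO_ID = "user/superb_demo"  # placeholder: replace with the actual Hub id of this dataset

path = hf_hub_download(
    repo_id=REPO_ID,
    filename="ks/test-00000-of-00001.parquet",  # shard produced by the rename above
    repo_type="dataset",
)
table = pq.read_table(path)
print(table.schema)     # file, audio (typically stored as a bytes/path struct), label
print(table.num_rows)   # 8 examples, matching num_examples in the metadata above
```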
superb_demo.py DELETED
@@ -1,435 +0,0 @@
1
- # coding=utf-8
2
- # Copyright 2021 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
3
- #
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
- #
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
- #
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
-
16
- # Lint as: python3
17
- """SUPERB: Speech processing Universal PERformance Benchmark."""
18
-
19
- import csv
20
- import glob
21
- import os
22
- import textwrap
23
-
24
- import datasets
25
- from datasets.tasks import AutomaticSpeechRecognition
26
-
27
- _CITATION = """\
28
- @article{DBLP:journals/corr/abs-2105-01051,
29
- author = {Shu{-}Wen Yang and
30
- Po{-}Han Chi and
31
- Yung{-}Sung Chuang and
32
- Cheng{-}I Jeff Lai and
33
- Kushal Lakhotia and
34
- Yist Y. Lin and
35
- Andy T. Liu and
36
- Jiatong Shi and
37
- Xuankai Chang and
38
- Guan{-}Ting Lin and
39
- Tzu{-}Hsien Huang and
40
- Wei{-}Cheng Tseng and
41
- Ko{-}tik Lee and
42
- Da{-}Rong Liu and
43
- Zili Huang and
44
- Shuyan Dong and
45
- Shang{-}Wen Li and
46
- Shinji Watanabe and
47
- Abdelrahman Mohamed and
48
- Hung{-}yi Lee},
49
- title = {{SUPERB:} Speech processing Universal PERformance Benchmark},
50
- journal = {CoRR},
51
- volume = {abs/2105.01051},
52
- year = {2021},
53
- url = {https://arxiv.org/abs/2105.01051},
54
- archivePrefix = {arXiv},
55
- eprint = {2105.01051},
56
- timestamp = {Thu, 01 Jul 2021 13:30:22 +0200},
57
- biburl = {https://dblp.org/rec/journals/corr/abs-2105-01051.bib},
58
- bibsource = {dblp computer science bibliography, https://dblp.org}
59
- }
60
- """
61
-
62
- _DESCRIPTION = """\
63
- Self-supervised learning (SSL) has proven vital for advancing research in
64
- natural language processing (NLP) and computer vision (CV). The paradigm
65
- pretrains a shared model on large volumes of unlabeled data and achieves
66
- state-of-the-art (SOTA) for various tasks with minimal adaptation. However, the
67
- speech processing community lacks a similar setup to systematically explore the
68
- paradigm. To bridge this gap, we introduce Speech processing Universal
69
- PERformance Benchmark (SUPERB). SUPERB is a leaderboard to benchmark the
70
- performance of a shared model across a wide range of speech processing tasks
71
- with minimal architecture changes and labeled data. Among multiple usages of the
72
- shared model, we especially focus on extracting the representation learned from
73
- SSL due to its preferable re-usability. We present a simple framework to solve
74
- SUPERB tasks by learning task-specialized lightweight prediction heads on top of
75
- the frozen shared model. Our results demonstrate that the framework is promising
76
- as SSL representations show competitive generalizability and accessibility
77
- across SUPERB tasks. We release SUPERB as a challenge with a leaderboard and a
78
- benchmark toolkit to fuel the research in representation learning and general
79
- speech processing.
80
-
81
- Note that in order to limit the required storage for preparing this dataset, the
82
- audio is stored in the .flac format and is not converted to a float32 array. To
83
- convert, the audio file to a float32 array, please make use of the `.map()`
84
- function as follows:
85
-
86
-
87
- ```python
88
- import soundfile as sf
89
-
90
- def map_to_array(batch):
91
- speech_array, _ = sf.read(batch["file"])
92
- batch["speech"] = speech_array
93
- return batch
94
-
95
- dataset = dataset.map(map_to_array, remove_columns=["file"])
96
- ```
97
- """
98
-
99
-
100
- class SuperbConfig(datasets.BuilderConfig):
101
- """BuilderConfig for Superb."""
102
-
103
- def __init__(
104
- self,
105
- features,
106
- url,
107
- data_url=None,
108
- supervised_keys=None,
109
- task_templates=None,
110
- **kwargs,
111
- ):
112
- super().__init__(version=datasets.Version("1.9.0", ""), **kwargs)
113
- self.features = features
114
- self.data_url = data_url
115
- self.url = url
116
- self.supervised_keys = supervised_keys
117
- self.task_templates = task_templates
118
-
119
-
120
- class Superb(datasets.GeneratorBasedBuilder):
121
- """Superb dataset."""
122
-
123
- BUILDER_CONFIGS = [
124
- SuperbConfig(
125
- name="asr",
126
- description=textwrap.dedent(
127
- """\
128
- ASR transcribes utterances into words. While PR analyzes the
129
- improvement in modeling phonetics, ASR reflects the significance of
130
- the improvement in a real-world scenario. LibriSpeech
131
- train-clean-100/dev-clean/test-clean subsets are used for
132
- training/validation/testing. The evaluation metric is word error
133
- rate (WER)."""
134
- ),
135
- features=datasets.Features(
136
- {
137
- "file": datasets.Value("string"),
138
- "audio": datasets.features.Audio(sampling_rate=16_000),
139
- "text": datasets.Value("string"),
140
- "speaker_id": datasets.Value("int64"),
141
- "chapter_id": datasets.Value("int64"),
142
- "id": datasets.Value("string"),
143
- }
144
- ),
145
- supervised_keys=("file", "text"),
146
- url="http://www.openslr.org/12",
147
- data_url="data/LibriSpeech-test-clean.zip",
148
- ),
149
- SuperbConfig(
150
- name="ks",
151
- description=textwrap.dedent(
152
- """\
153
- Keyword Spotting (KS) detects preregistered keywords by classifying utterances into a predefined set of
154
- words. The task is usually performed on-device for the fast response time. Thus, accuracy, model size, and
155
- inference time are all crucial. SUPERB uses the widely used Speech Commands dataset v1.0 for the task.
156
- The dataset consists of ten classes of keywords, a class for silence, and an unknown class to include the
157
- false positive. The evaluation metric is accuracy (ACC)"""
158
- ),
159
- features=datasets.Features(
160
- {
161
- "file": datasets.Value("string"),
162
- "audio": datasets.features.Audio(sampling_rate=16_000),
163
- "label": datasets.ClassLabel(
164
- names=[
165
- "yes",
166
- "no",
167
- "up",
168
- "down",
169
- "left",
170
- "right",
171
- "on",
172
- "off",
173
- "stop",
174
- "go",
175
- "_silence_",
176
- "_unknown_",
177
- ]
178
- ),
179
- }
180
- ),
181
- supervised_keys=("file", "label"),
182
- url="https://www.tensorflow.org/datasets/catalog/speech_commands",
183
- data_url="data/speech_commands_test_set_v0.01.zip",
184
- ),
185
- SuperbConfig(
186
- name="ic",
187
- description=textwrap.dedent(
188
- """\
189
- Intent Classification (IC) classifies utterances into predefined classes to determine the intent of
190
- speakers. SUPERB uses the Fluent Speech Commands dataset, where each utterance is tagged with three intent
191
- labels: action, object, and location. The evaluation metric is accuracy (ACC)."""
192
- ),
193
- features=datasets.Features(
194
- {
195
- "file": datasets.Value("string"),
196
- "audio": datasets.features.Audio(sampling_rate=16_000),
197
- "speaker_id": datasets.Value("string"),
198
- "text": datasets.Value("string"),
199
- "action": datasets.ClassLabel(
200
- names=["activate", "bring", "change language", "deactivate", "decrease", "increase"]
201
- ),
202
- "object": datasets.ClassLabel(
203
- names=[
204
- "Chinese",
205
- "English",
206
- "German",
207
- "Korean",
208
- "heat",
209
- "juice",
210
- "lamp",
211
- "lights",
212
- "music",
213
- "newspaper",
214
- "none",
215
- "shoes",
216
- "socks",
217
- "volume",
218
- ]
219
- ),
220
- "location": datasets.ClassLabel(names=["bedroom", "kitchen", "none", "washroom"]),
221
- }
222
- ),
223
- # no default supervised keys, since there are 3 labels
224
- supervised_keys=None,
225
- url="https://fluent.ai/fluent-speech-commands-a-dataset-for-spoken-language-understanding-research/",
226
- data_url="data/fluent_speech_commands_dataset.zip",
227
- ),
228
- SuperbConfig(
229
- name="si",
230
- description=textwrap.dedent(
231
- """\
232
- Speaker Identification (SI) classifies each utterance for its speaker identity as a multi-class
233
- classification, where speakers are in the same predefined set for both training and testing. The widely
234
- used VoxCeleb1 dataset is adopted, and the evaluation metric is accuracy (ACC)."""
235
- ),
236
- features=datasets.Features(
237
- {
238
- "file": datasets.Value("string"),
239
- "audio": datasets.features.Audio(sampling_rate=16_000),
240
- "label": datasets.ClassLabel(names=[f"id{i + 10001}" for i in range(1251)]),
241
- }
242
- ),
243
- supervised_keys=("file", "label"),
244
- url="https://www.robots.ox.ac.uk/~vgg/data/voxceleb/vox1.html",
245
- data_url="data/VoxCeleb1.zip"
246
- ),
247
- SuperbConfig(
248
- name="er",
249
- description=textwrap.dedent(
250
- """\
251
- Emotion Recognition (ER) predicts an emotion class for each utterance. The most widely used ER dataset
252
- IEMOCAP is adopted, and we follow the conventional evaluation protocol: we drop the unbalance emotion
253
- classes to leave the final four classes with a similar amount of data points and cross-validates on five
254
- folds of the standard splits. The evaluation metric is accuracy (ACC)."""
255
- ),
256
- features=datasets.Features(
257
- {
258
- "file": datasets.Value("string"),
259
- "audio": datasets.features.Audio(sampling_rate=16_000),
260
- "label": datasets.ClassLabel(names=['neu', 'hap', 'ang', 'sad']),
261
- }
262
- ),
263
- supervised_keys=("file", "label"),
264
- url="https://sail.usc.edu/iemocap/",
265
- data_url="data/IEMOCAP_full_release.zip"
266
- ),
267
- ]
268
-
269
- def _info(self):
270
- return datasets.DatasetInfo(
271
- description=_DESCRIPTION,
272
- features=self.config.features,
273
- supervised_keys=self.config.supervised_keys,
274
- homepage=self.config.url,
275
- citation=_CITATION,
276
- task_templates=self.config.task_templates,
277
- )
278
-
279
- def _split_generators(self, dl_manager):
280
- if self.config.name == "asr":
281
- archive_path = dl_manager.download_and_extract(self.config.data_url)
282
- return [
283
- datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path}),
284
- ]
285
- elif self.config.name == "ks":
286
- archive_path = dl_manager.download_and_extract(self.config.data_url)
287
- return [
288
- datasets.SplitGenerator(
289
- name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": "test"}
290
- ),
291
- ]
292
- elif self.config.name == "ic":
293
- archive_path = dl_manager.download_and_extract(self.config.data_url)
294
- return [
295
- datasets.SplitGenerator(
296
- name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": "test"}
297
- ),
298
- ]
299
- elif self.config.name == "si":
300
- archive_path = dl_manager.download_and_extract(self.config.data_url)
301
- return [
302
- datasets.SplitGenerator(
303
- name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": 3}
304
- ),
305
- ]
306
- elif self.config.name == "sd":
307
- archive_path = dl_manager.download_and_extract(self.config.data_url)
308
- return [
309
- datasets.SplitGenerator(
310
- name=datasets.Split.TEST, gen_kwargs={"archive_path": archive_path, "split": "test"}
311
- )
312
- ]
313
- elif self.config.name == "er":
314
- archive_path = dl_manager.download_and_extract(self.config.data_url)
315
- return [
316
- datasets.SplitGenerator(
317
- name="session1", gen_kwargs={"archive_path": archive_path, "split": 1},
318
- )
319
- ]
320
-
321
- def _generate_examples(self, archive_path, split=None):
322
- """Generate examples."""
323
- if self.config.name == "asr":
324
- transcripts_glob = os.path.join(archive_path, "LibriSpeech", "*/*/*/*.txt")
325
- key = 0
326
- for transcript_path in sorted(glob.glob(transcripts_glob)):
327
- transcript_dir_path = os.path.dirname(transcript_path)
328
- with open(transcript_path, "r", encoding="utf-8") as f:
329
- for line in f:
330
- line = line.strip()
331
- id_, transcript = line.split(" ", 1)
332
- audio_file = f"{id_}.flac"
333
- speaker_id, chapter_id = [int(el) for el in id_.split("-")[:2]]
334
- audio_path = os.path.join(transcript_dir_path, audio_file)
335
- yield key, {
336
- "id": id_,
337
- "speaker_id": speaker_id,
338
- "chapter_id": chapter_id,
339
- "file": audio_path,
340
- "audio": audio_path,
341
- "text": transcript,
342
- }
343
- key += 1
344
- elif self.config.name == "ks":
345
- words = ["yes", "no", "up", "down", "left", "right", "on", "off", "stop", "go"]
346
- splits = _split_ks_files(archive_path, split)
347
- for key, audio_file in enumerate(sorted(splits[split])):
348
- base_dir, file_name = os.path.split(audio_file)
349
- _, word = os.path.split(base_dir)
350
- if word in words:
351
- label = word
352
- elif word == "_silence_" or word == "_background_noise_":
353
- label = "_silence_"
354
- else:
355
- label = "_unknown_"
356
- yield key, {"file": audio_file, "audio": audio_file, "label": label}
357
- elif self.config.name == "ic":
358
- root_path = os.path.join(archive_path, "fluent_speech_commands_dataset/")
359
- csv_path = os.path.join(root_path, f"data/{split}_data.csv")
360
- with open(csv_path, encoding="utf-8") as csv_file:
361
- csv_reader = csv.reader(csv_file, delimiter=",", skipinitialspace=True)
362
- next(csv_reader)
363
- for row in csv_reader:
364
- key, file_path, speaker_id, text, action, object_, location = row
365
- audio_path = os.path.join(root_path, file_path)
366
- yield key, {
367
- "file": audio_path,
368
- "audio": audio_path,
369
- "speaker_id": speaker_id,
370
- "text": text,
371
- "action": action,
372
- "object": object_,
373
- "location": location,
374
- }
375
- elif self.config.name == "si":
376
- wav_path = os.path.join(archive_path, "wav/")
377
- splits_path = os.path.join(archive_path, "veri_test_class.txt")
378
- with open(splits_path, "r", encoding="utf-8") as f:
379
- for key, line in enumerate(f):
380
- split_id, file_path = line.strip().split(" ")
381
- if int(split_id) != split:
382
- continue
383
- speaker_id = file_path.split("/")[0]
384
- audio_path = os.path.join(wav_path, file_path)
385
- yield key, {
386
- "file": audio_path,
387
- "audio": audio_path,
388
- "label": speaker_id,
389
- }
390
- elif self.config.name == "er":
391
- root_path = os.path.join(archive_path, f"Session{split}/")
392
- wav_path = os.path.join(root_path, "sentences/wav/")
393
- labels_path = os.path.join(root_path, "dialog/EmoEvaluation/*.txt")
394
- emotions = ['neu', 'hap', 'ang', 'sad', 'exc']
395
- key = 0
396
- for labels_file in sorted(glob.glob(labels_path)):
397
- with open(labels_file, "r", encoding="utf-8") as f:
398
- for line in f:
399
- if line[0] != "[":
400
- continue
401
- _, filename, emo, _ = line.split("\t")
402
- if emo not in emotions:
403
- continue
404
- wav_subdir = filename.rsplit("_", 1)[0]
405
- filename = f"{filename}.wav"
406
- audio_path = os.path.join(wav_path, wav_subdir, filename)
407
- yield key, {
408
- "file": audio_path,
409
- "audio": audio_path,
410
- "label": emo.replace("exc", "hap"),
411
- }
412
- key += 1
413
-
414
-
415
- def _split_ks_files(archive_path, split):
416
- audio_path = os.path.join(archive_path, "**/*.wav")
417
- audio_paths = glob.glob(audio_path)
418
- if split == "test":
419
- # use all available files for the test archive
420
- return {"test": audio_paths}
421
-
422
- val_list_file = os.path.join(archive_path, "validation_list.txt")
423
- test_list_file = os.path.join(archive_path, "testing_list.txt")
424
- with open(val_list_file, encoding="utf-8") as f:
425
- val_paths = f.read().strip().splitlines()
426
- val_paths = [os.path.join(archive_path, p) for p in val_paths]
427
- with open(test_list_file, encoding="utf-8") as f:
428
- test_paths = f.read().strip().splitlines()
429
- test_paths = [os.path.join(archive_path, p) for p in test_paths]
430
-
431
- # the paths for the train set is just whichever paths that do not exist in
432
- # either the test or validation splits
433
- train_paths = list(set(audio_paths) - set(val_paths) - set(test_paths))
434
-
435
- return {"train": train_paths, "val": val_paths}
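The docstring of the removed script suggested decoding audio manually with `soundfile` via `.map()`. With the Parquet-backed configs and the `Audio(sampling_rate=16000)` feature declared in the README metadata, decoding typically happens on access, so that manual step is generally no longer needed. A minimal sketch, again assuming the placeholder repository id used above:

```python
# Minimal sketch: accessing decoded audio via the Audio feature.
from datasets import load_dataset

REPO_ID = "user/superb_demo"  # placeholder: replace with the actual Hub id of this dataset

ks = load_dataset(REPO_ID, "ks", split="test")

sample = ks[0]["audio"]           # decoded on access by the Audio feature
print(sample["sampling_rate"])    # 16000, as declared in the README metadata
print(sample["array"].shape)      # waveform as a NumPy float array
```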