# VoxLingua107.py — Hugging Face `datasets` loading script for the VoxLingua107
# spoken-language-identification corpus (http://bark.phon.ioc.ee/voxlingua107/).
import collections
import gzip
import os
import textwrap

import datasets
# Dataset-card metadata surfaced through `DatasetInfo` in `_info()`.
# All four are placeholders — TODO: fill in before release.
_DESCRIPTION = "tba"
_URL = "tba"  # NOTE(review): unused here; homepage comes from the config's `url`.
_CITATION = "tba"
_LICENSE = "tba"  # NOTE(review): currently not passed to DatasetInfo — confirm intent.
class VoxLinguaConfig(datasets.BuilderConfig):
    """BuilderConfig for the VoxLingua107 corpus.

    Args:
        features: `datasets.Features` describing one example.
        url: Homepage URL of the corpus.
        data_url: Template URL of the data archives; contains a
            ``{language}`` placeholder to be filled per language.
        supervised_keys: ``(input, output)`` column pair for supervised use.
        shuffled: Flag carried on the config (see note below).
        deduplicated: Flag carried on the config (see note below).
        task_templates: Optional task templates forwarded to `DatasetInfo`.
        **kwargs: Forwarded to `datasets.BuilderConfig`.
    """

    def __init__(
        self,
        features,
        url,
        data_url=None,
        supervised_keys=None,
        shuffled=False,
        deduplicated=False,
        task_templates=None,
        **kwargs,
    ):
        super().__init__(version=datasets.Version("1.9.0", ""), **kwargs)
        self.features = features
        self.data_url = data_url
        self.url = url
        self.supervised_keys = supervised_keys
        # Bug fix: `shuffled` and `deduplicated` were accepted (and are passed
        # by BUILDER_CONFIGS) but silently discarded; keep them on the config.
        self.shuffled = shuffled
        self.deduplicated = deduplicated
        self.task_templates = task_templates
def _languages():
"""Create the sorted dictionary of language codes, and language names.
Returns:
The sorted dictionary as an instance of `collections.OrderedDict`.
"""
langs = {
"af":"",
"am":"",
"ar":"",
"as":"",
"az":"",
"ba":"",
"be":"",
"bg":"",
"bn":"",
"bo":"",
"br":"",
"bs":"",
"ca":"",
"ceb":"",
"cs":"",
"cy":"",
"da":"",
"de":"",
"el":"",
"en":"",
"eo":"",
"es":"",
"et":"",
"eu":"",
"fa":"",
"fi":"",
"fo":"",
"fr":"",
"gl":"",
"gn":"",
"gu":"",
"gv":"",
"ha":"",
"haw":"",
"hi":"",
"hr":"",
"ht":"",
"hu":"",
"hy":"",
"ia":"",
"id":"",
"is":"",
"it":"",
"iw":"",
"ja":"",
"jw":"",
"ka":"",
"kk":"",
"km":"",
"kn":"",
"ko":"",
"la":"",
"lb":"",
"ln":"",
"lo":"",
"lt":"",
"lv":"",
"mg":"",
"mi":"",
"mk":"",
"ml":"",
"mn":"",
"mr":"",
"ms":"",
"mt":"",
"my":"",
"ne":"",
"nl":"",
"nn":"",
"no":"",
"oc":"",
"pa":"",
"pl":"",
"ps":"",
"pt":"",
"ro":"",
"ru":"",
"sa":"",
"sco":"",
"sd":"",
"si":"",
"sk":"",
"sl":"",
"sn":"",
"so":"",
"sq":"",
"sr":"",
"su":"",
"sv":"",
"sw":"",
"ta":"",
"te":"",
"tg":"",
"th":"",
"tk":"",
"tl":"",
"tr":"",
"tt":"",
"uk":"",
"ur":"",
"uz":"",
"vi":"",
"war":"",
"yi":"",
"yo":"",
"zh":""
}
return collections.OrderedDict(sorted(langs.items()))
class VoxLingua(datasets.GeneratorBasedBuilder):
    """Builder for VoxLingua107: one config per language, plus a shared dev set."""

    BUILDER_CONFIGS = [
        VoxLinguaConfig(
            name=language,
            description=textwrap.dedent(
                """ tbd """
            ),
            shuffled=False,
            deduplicated=False,
            features=datasets.Features(
                {
                    "file": datasets.Value("string"),
                    "audio": datasets.Audio(sampling_rate=16_000),
                    # Labels are the integer indices 0..106 (as strings) of the
                    # alphabetically sorted language codes.
                    "label": datasets.ClassLabel(
                        names=[f"{i}" for i in range(107)]
                    ),
                }
            ),
            supervised_keys=("file", "label"),
            url="http://bark.phon.ioc.ee/voxlingua107/",
            data_url="http://bark.phon.ioc.ee/voxlingua107/{language}.zip",
        )
        for language in _languages()
    ]
    BUILDER_CONFIG_CLASS = VoxLinguaConfig

    def _info(self):
        """Assemble `DatasetInfo` from the module constants and active config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=self.config.url,
            citation=_CITATION,
            task_templates=self.config.task_templates,
        )

    def _split_generators(self, dl_manager):
        """Download and extract the per-language train archives and the dev archive.

        Bug fixes vs. the original:
        * `datasets.Split.DEV` does not exist — the validation split must be
          `datasets.Split.VALIDATION`.
        * The configured `data_url` template (with its `{language}` placeholder)
          was never used; URLs were built by concatenating onto the homepage URL.
        * Archives are now *extracted* (not just downloaded) so that
          `_generate_examples` can walk the audio files.
        """
        train_urls = [
            self.config.data_url.format(language=lang) for lang in _languages()
        ]
        train_dirs = dl_manager.download_and_extract(train_urls)
        # The dev archive lives next to the language archives under the same
        # naming scheme: .../dev.zip
        dev_dirs = dl_manager.download_and_extract(
            [self.config.data_url.format(language="dev")]
        )
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN, gen_kwargs={"archive_path": train_dirs}
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION, gen_kwargs={"archive_path": dev_dirs}
            ),
        ]

    def _generate_examples(self, archive_path, split=None):
        """Yield `(key, example)` pairs from the extracted archives.

        Bug fix: the original returned `""`, which violates the
        `GeneratorBasedBuilder` contract (this method must be a generator).

        Assumes each archive extracts into directories named by language code
        containing the audio files — TODO: confirm the real archive layout.
        """
        # Language code -> integer label, matching the ClassLabel names "0".."106".
        lang_index = {lang: i for i, lang in enumerate(_languages())}
        key = 0
        for root_dir in archive_path:
            for dirpath, _dirnames, filenames in sorted(os.walk(root_dir)):
                # NOTE(review): label -1 (= missing) is used when the parent
                # directory is not a known language code — verify for dev.zip.
                label = lang_index.get(os.path.basename(dirpath), -1)
                for fname in sorted(filenames):
                    if not fname.endswith(".wav"):
                        continue
                    path = os.path.join(dirpath, fname)
                    yield key, {"file": path, "audio": path, "label": label}
                    key += 1