mj-new committed
Commit: 0d750c4
Parent: eb258ba

Fixed bug in testing script

Files changed (2)
  1. pl-asr-bigos-v2.py +7 -6
  2. test.py +1 -1
pl-asr-bigos-v2.py CHANGED
@@ -62,10 +62,12 @@ _BASE_PATH = "data/{subset}/"
 _DATA_URL = _BASE_PATH + "{split}.tar.gz"
 _META_URL = _BASE_PATH + "{split}.tsv"
 
+SPLITS=["train","validation","test"]
+
 class BigosConfig(datasets.BuilderConfig):
 
     def __init__(
-        self, name, description, citation, homepage
+        self, name, description, citation, homepage,
     ):
         super(BigosConfig, self).__init__(
             name=self.name,
@@ -133,14 +135,13 @@ class Bigos(datasets.GeneratorBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
-        splits = ["test", "train", "validation"]
 
         if self.config.name == "all":
-            data_urls = {split: [_DATA_URL.format(subset=subset,split=split) for subset in _BIGOS_SUBSETS] for split in splits}
-            meta_urls = {split: [_META_URL.format(subset=subset,split=split) for subset in _BIGOS_SUBSETS] for split in splits}
+            data_urls = {split: [_DATA_URL.format(subset=subset,split=split) for subset in _BIGOS_SUBSETS] for split in SPLITS}
+            meta_urls = {split: [_META_URL.format(subset=subset,split=split) for subset in _BIGOS_SUBSETS] for split in SPLITS}
         else:
-            data_urls = {split: [_DATA_URL.format(subset=self.config.name, split=split)] for split in splits}
-            meta_urls = {split: [_META_URL.format(subset=self.config.name, split=split)] for split in splits}
+            data_urls = {split: [_DATA_URL.format(subset=self.config.name, split=split)] for split in SPLITS}
+            meta_urls = {split: [_META_URL.format(subset=self.config.name, split=split)] for split in SPLITS}
 
         archive_paths = dl_manager.download(data_urls)
         local_extracted_archives = dl_manager.extract(archive_paths) if not dl_manager.is_streaming else {}
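For context, a minimal sketch (not part of the commit) of what the URL templates above expand to for a single subset. The constants are copied from the diff; the example subset name is one of the _BIGOS_SUBSETS listed in test.py.

# Minimal sketch, assuming the constants from the diff above: shows the
# per-split URL lists that _split_generators builds for one subset.
_BASE_PATH = "data/{subset}/"
_DATA_URL = _BASE_PATH + "{split}.tar.gz"
_META_URL = _BASE_PATH + "{split}.tsv"
SPLITS = ["train", "validation", "test"]

subset = "pjatk-clarin_studio-15"  # taken from _BIGOS_SUBSETS in test.py
data_urls = {split: [_DATA_URL.format(subset=subset, split=split)] for split in SPLITS}
meta_urls = {split: [_META_URL.format(subset=subset, split=split)] for split in SPLITS}

print(data_urls["test"])  # ['data/pjatk-clarin_studio-15/test.tar.gz']
print(meta_urls["test"])  # ['data/pjatk-clarin_studio-15/test.tsv']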
test.py CHANGED
@@ -2,7 +2,7 @@ import datasets
 from datasets import load_dataset
 
 # test reading all subsets for "test" split
-_BIGOS_SUBSETS = ["pjatk-clarin_mobile-15", "pjatk-clarin_studio-15", "fair-mls-20", "mailabs-corpus_librivox-23", "mozilla-common_voice_15-19", "pwr-azon_read-20", "pwr-azon_spont-20", "pwr-maleset-unk", "pwr-shortwords-unk", "pwr-viu-unk", "google-fleurs-22", "polyai-minds14-21"]
+_BIGOS_SUBSETS = ["pjatk-clarin_mobile-15", "pjatk-clarin_studio-15", "fair-mls-20", "mailabs-corpus_librivox-19", "mozilla-common_voice_15-23", "pwr-azon_read-20", "pwr-azon_spont-20", "pwr-maleset-unk", "pwr-shortwords-unk", "pwr-viu-unk", "google-fleurs-22", "polyai-minds14-21"]
 
 splits=["test", "validation", "train"]
 # Refer to documentation for the descriptions of splits and subsets
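A hedged usage sketch of loading one of the corrected subset names with load_dataset. The repository id and the streaming flag are assumptions for illustration, not part of this commit.

from datasets import load_dataset

# Assumed Hub repository id (hypothetical; replace with the actual path that
# hosts pl-asr-bigos-v2.py).
REPO_ID = "pl-asr-bigos-v2"

# Load one corrected subset for the "test" split; streaming avoids pulling
# the full tar.gz archives before iteration.
ds = load_dataset(REPO_ID, name="mailabs-corpus_librivox-19", split="test", streaming=True)
for example in ds:
    print(example)
    break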