mariosasko committed
Commit 26cd3a2
Parent: 9cea7d6

Update wikipedia.py

Files changed (1):
  wikipedia.py  +18 -4
wikipedia.py CHANGED
@@ -909,6 +909,16 @@ class WikipediaConfig(datasets.BuilderConfig):
 _DATE = "20220301"
 
 
+PROCESSED_CONFIG_TO_NUM_SHARDS = {
+    "20220301.de": 18,
+    "20220301.en": 41,
+    "20220301.fr": 15,
+    "20220301.frr": 1,
+    "20220301.it": 10,
+    "20220301.simple": 1,
+}
+
+
 class Wikipedia(datasets.ArrowBasedBuilder):
     """Wikipedia dataset."""
 
@@ -951,13 +961,16 @@ class Wikipedia(datasets.ArrowBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
-        processed_data_dir = os.path.join(dl_manager._base_path, "data", self.config.name)
-        is_processed = os.path.exists(processed_data_dir)
-        if is_processed:
-            parquet_urls = sorted(os.path.join(processed_data_dir, parquet_file) for parquet_file in os.listdir(processed_data_dir))
+        if self.config.name in PROCESSED_CONFIG_TO_NUM_SHARDS:
+            num_shards = PROCESSED_CONFIG_TO_NUM_SHARDS[self.config.name]
+            parquet_urls = [
+                os.path.join("data", self.config.name, f"train-{i:05d}-{num_shards:05d}.parquet")
+                for i in range(num_shards)
+            ]
             # Use dictionary since testing mock always returns the same result.
             downloaded_files = dl_manager.download({"parquet": parquet_urls})
             files = downloaded_files["parquet"]
+            is_processed = True
         else:
             def _base_url(lang):
                 return _BASE_URL_TMPL.format(lang=lang.replace("-", "_"), date=self.config.date)
@@ -989,6 +1002,7 @@ class Wikipedia(datasets.ArrowBasedBuilder):
             # Use dictionary since testing mock always returns the same result.
             downloaded_files = dl_manager.download({"xml": xml_urls})
             files = downloaded_files["xml"]
+            is_processed = False
 
         return [
             datasets.SplitGenerator(  # pylint:disable=g-complex-comprehension
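
For reference, a minimal standalone sketch of the parquet URL pattern that the new _split_generators branch builds for a pre-processed config. The shard mapping and filename template are copied from the diff above; the config name chosen below and the printed paths are only an illustration (in the builder, the relative paths are resolved and fetched by dl_manager.download).

import os

# Shard counts for the pre-processed configs (copied from the diff above).
PROCESSED_CONFIG_TO_NUM_SHARDS = {
    "20220301.de": 18,
    "20220301.en": 41,
    "20220301.fr": 15,
    "20220301.frr": 1,
    "20220301.it": 10,
    "20220301.simple": 1,
}

config_name = "20220301.fr"  # example config; any key of the mapping works
num_shards = PROCESSED_CONFIG_TO_NUM_SHARDS[config_name]

# Same list comprehension as the new branch in _split_generators:
# one relative parquet path per shard, with zero-padded shard indices.
parquet_urls = [
    os.path.join("data", config_name, f"train-{i:05d}-{num_shards:05d}.parquet")
    for i in range(num_shards)
]

print(parquet_urls[0])   # data/20220301.fr/train-00000-00015.parquet (POSIX separators)
print(parquet_urls[-1])  # data/20220301.fr/train-00014-00015.parquet

With this change, the configs listed in the mapping are served from these pre-processed parquet shards when the dataset is loaded, e.g. datasets.load_dataset("wikipedia", "20220301.fr"), while all other configs continue to go through the XML-dump branch.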