Datasets:

Languages:
English
Multilinguality:
multilingual
Size Categories:
100M<n<1B
Language Creators:
found
Annotations Creators:
no-annotation
Source Datasets:
original
ArXiv:
License:
system HF staff committed on
Commit
d03cf9c
1 Parent(s): 213288b

Update files from the datasets library (from 1.1.0)

Browse files

Release notes: https://github.com/huggingface/datasets/releases/tag/1.1.0

Files changed (1) hide show
  1. c4.py +3 -2
c4.py CHANGED
@@ -145,6 +145,7 @@ class C4(datasets.BeamBasedBuilder):
145
  ),
146
  ]
147
 
 
148
  def manual_download_instructions(self):
149
  return """\
150
  For the WebText-like config, you must manually download 'OpenWebText.zip'
@@ -201,7 +202,7 @@ class C4(datasets.BeamBasedBuilder):
201
  wet_urls = []
202
  for wet_path_url in file_paths["wet_path_urls"]:
203
  with open(wet_path_url, "r", encoding="utf-8") as f:
204
- wet_urls.extend(["%s/%s" % (_DOWNLOAD_HOST, l.strip()) for l in f])
205
  file_paths["wet_urls"] = wet_urls
206
  file_paths["wet_files"] = []
207
 
@@ -300,7 +301,7 @@ class C4(datasets.BeamBasedBuilder):
300
  # Output: url, text
301
  if self.config.clean:
302
  with open(file_paths["badwords"], "r", encoding="utf-8") as f:
303
- badwords = [l.strip() for l in f]
304
  page_content = page_content | "clean_pages" >> beam.FlatMap(get_clean_page_fn(badwords))
305
  page_content = remove_duplicate_text(page_content)
306
 
 
145
  ),
146
  ]
147
 
148
+ @property
149
  def manual_download_instructions(self):
150
  return """\
151
  For the WebText-like config, you must manually download 'OpenWebText.zip'
 
202
  wet_urls = []
203
  for wet_path_url in file_paths["wet_path_urls"]:
204
  with open(wet_path_url, "r", encoding="utf-8") as f:
205
+ wet_urls.extend(["%s/%s" % (_DOWNLOAD_HOST, line.strip()) for line in f])
206
  file_paths["wet_urls"] = wet_urls
207
  file_paths["wet_files"] = []
208
 
 
301
  # Output: url, text
302
  if self.config.clean:
303
  with open(file_paths["badwords"], "r", encoding="utf-8") as f:
304
+ badwords = [line.strip() for line in f]
305
  page_content = page_content | "clean_pages" >> beam.FlatMap(get_clean_page_fn(badwords))
306
  page_content = remove_duplicate_text(page_content)
307