ShivamPR21 committed
Commit 740964d
1 Parent(s): 7c53931

Support for deduplication parameter.


Signed-off-by: ShivamPR21 <pandeyshivam2023robotics@gmail.com>
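
The new flag is read inside the generator as getattr(self.config, 'deduplication', False), so it defaults to off. Below is a hedged usage sketch, not part of this commit: it assumes the deduplication keyword can be forwarded through load_dataset to the builder config (i.e. that RedPajamaDataV2Config accepts or defines such a field), and that the repo id and config name shown are the ones this script ships with; any other required config arguments are omitted.

    from datasets import load_dataset

    # Hypothetical sketch: the deduplication keyword is assumed to be accepted
    # by the builder config; the generator itself falls back to False when the
    # attribute is missing.
    ds = load_dataset(
        "togethercomputer/RedPajama-Data-V2",
        name="sample-10B",        # config name present in BUILDER_CONFIGS
        deduplication=True,       # drop documents listed in the duplicates files
        trust_remote_code=True,   # needed for script-based datasets on recent releases
    )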

Files changed (1)
  1. RedPajama-Data-V2.py +7 -53
RedPajama-Data-V2.py CHANGED
@@ -143,11 +143,6 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
     """RedPajama V2: Quality annotated Web Text Documents."""
 
     BUILDER_CONFIGS = [
-        # RedPajamaDataV2Config(
-        #     name="sample",
-        #     version=datasets.Version("1.0.0", ""),
-        #     description=f"RedPajamaV2 Sample",
-        # ),
         RedPajamaDataV2Config(
             name="sample-10B",
             version=datasets.Version("1.0.0", ""),
@@ -189,53 +184,6 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
             supervised_keys=None,
         )
 
-    # def _split_generators_sample(self, dl_manager):
-    #     # fetch list of base tags
-    #     sample_base_tags_fp = dl_manager.download_and_extract(
-    #         "sample/sample_listings.txt"
-    #     )
-    #     with open(sample_base_tags_fp, "r") as fd:
-    #         sample_base_tags = [line.strip() for line in fd]
-
-    #     # fetch documents
-    #     logger.info(f"Downloading {len(sample_base_tags)} documents files.")
-    #     documents_files = dl_manager.download(
-    #         {
-    #             base_tag: f"sample/documents/{base_tag}.json.gz"
-    #             for base_tag in sample_base_tags
-    #         }
-    #     )
-
-    #     # fetch quality signals
-    #     logger.info(f"Downloading {len(sample_base_tags)} quality signals files.")
-    #     quality_signals_files = dl_manager.download(
-    #         {
-    #             base_tag: f"sample/quality_signals/{base_tag}.signals.json.gz"
-    #             for base_tag in sample_base_tags
-    #         }
-    #     )
-
-    #     # fetch ids of duplicates
-    #     logger.info(f"Downloading {len(sample_base_tags)} duplicates ids files.")
-    #     duplicates_ids_files = dl_manager.download(
-    #         {
-    #             base_tag: f"sample/duplicates/{base_tag}.duplicates.parquet"
-    #             for base_tag in sample_base_tags
-    #         }
-    #     )
-
-    #     return [
-    #         datasets.SplitGenerator(
-    #             name=datasets.Split.TRAIN,
-    #             gen_kwargs={
-    #                 "base_tags": sample_base_tags,
-    #                 "documents_files": documents_files,
-    #                 "quality_signals_files": quality_signals_files,
-    #                 "duplicates_ids_files": duplicates_ids_files,
-    #             },
-    #         )
-    #     ]
-
     @staticmethod
     def _align_order(source, target):
         res = []
@@ -393,6 +341,8 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
         return
 
     def _handle_head_middle(self, base_tag, doc_file, qs_file, dupe_file):
+        deduplication = getattr(self.config, 'deduplication', False)
+
         if qs_file is None:
            yield from self._handle_tail(base_tag, doc_file, None, None)
            return
@@ -414,6 +364,10 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
         with gzip.open(qs_file, "rt", encoding="utf-8") as qf:
             for row, (doc, qs) in enumerate(zip(df, qf)):
                 doc_id = f"{base_tag}.json.gz/{row}"
+                is_duplicated = doc_id in duplicates
+
+                if is_duplicated and deduplication:
+                    continue
 
                 try:
                     yield self.handle_record(
@@ -421,7 +375,7 @@ class RedPajamaV2(datasets.GeneratorBasedBuilder):
                         doc_id=doc_id,
                         doc=doc,
                         qs=qs,
-                        is_duplicate=doc_id in duplicates,
+                        is_duplicate=is_duplicated,
                     )
                 except Exception as e:
                     logger.warning(
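
Taken together, these hunks change _handle_head_middle so that a document whose id appears in the duplicates set is either skipped outright (deduplication enabled) or kept and merely flagged through the existing is_duplicate field (the default). A minimal standalone sketch of that control flow, with hypothetical names and purely for illustration:

    def iter_records(doc_ids, duplicates, deduplication=False):
        # Mirrors the new loop body: look up duplicate status once, then
        # either drop the record or pass the flag through.
        for doc_id in doc_ids:
            is_duplicated = doc_id in duplicates
            if is_duplicated and deduplication:
                continue  # deduplication on: drop duplicate documents entirely
            yield {"doc_id": doc_id, "is_duplicate": is_duplicated}

    # With deduplication=True only "a" and "c" survive; with the default False
    # all three are yielded and "b" is flagged as a duplicate.
    print(list(iter_records(["a", "b", "c"], duplicates={"b"}, deduplication=True)))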