# adapted from: https://github.com/huggingface/transformers/blob/master/examples/research_projects/codeparrot/scripts/preprocessing.py
import datasets


def get_hash(example):
    """Get hash of the text field."""
    return {"hash": hash(example["text"])}


def check_uniques(example, uniques):
    """Check if the current hash is still in the set of unique hashes; remove it if so."""
    if example["hash"] in uniques:
        uniques.remove(example["hash"])
        return True
    else:
        return False


def filter_unique(example, uniques):
    """Keep only the first example seen for each unique hash."""
    return check_uniques(example, uniques)


dataset = datasets.load_from_disk("/researchdisk/mc4_3.1.0_fi_cleaned")

# TRAIN SPLIT DEDUPLICATION
print(f"Size of original dataset train: {len(dataset['train'])}")
dataset["train"] = dataset["train"].map(get_hash, num_proc=96)

# Collect the set of unique hashes
uniques = set(dataset["train"].unique("hash"))
frac = len(uniques) / len(dataset["train"])
print(f"Fraction of duplicates: {1 - frac:.2%}")

# Deduplicate data. The filter must run in a single process: with num_proc > 1,
# every worker would get its own copy of `uniques`, so duplicates that span
# shard boundaries would all be kept.
dataset_train_deduplicated = dataset["train"].filter(
    filter_unique, fn_kwargs={"uniques": uniques}
)
print(f"Size of filtered dataset train: {len(dataset_train_deduplicated)}")

# VALIDATION SPLIT DEDUPLICATION
print(f"Size of original dataset valid: {len(dataset['validation'])}")
dataset["validation"] = dataset["validation"].map(get_hash, num_proc=96)

# Collect the set of unique hashes
uniques = set(dataset["validation"].unique("hash"))
frac = len(uniques) / len(dataset["validation"])
print(f"Fraction of duplicates: {1 - frac:.2%}")

# Deduplicate data (single process, for the same reason as above)
dataset_valid_deduplicated = dataset["validation"].filter(
    filter_unique, fn_kwargs={"uniques": uniques}
)
print(f"Size of filtered dataset valid: {len(dataset_valid_deduplicated)}")

# SAVE DEDUPLICATED DATASET
dataset_train_deduplicated = dataset_train_deduplicated.remove_columns(["hash"])
dataset_valid_deduplicated = dataset_valid_deduplicated.remove_columns(["hash"])

fi_mc4_cleaned = datasets.DatasetDict()
fi_mc4_cleaned["train"] = dataset_train_deduplicated
fi_mc4_cleaned["validation"] = dataset_valid_deduplicated
fi_mc4_cleaned.save_to_disk("/researchdisk/mc4_3.1.0_fi_cleaned", num_proc=32)
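
# Note (optional sketch, not part of the original pipeline): Python's built-in
# hash() is salted per interpreter run, so the hashes computed above are only
# comparable within a single run (on Linux the fork-based map workers inherit
# the parent's salt, which is why the num_proc map still works). If hashes ever
# need to be reproducible across runs, a stable digest could be used instead:
#
# import hashlib
#
# def get_hash(example):
#     """Stable alternative: MD5 hex digest of the text field."""
#     return {"hash": hashlib.md5(example["text"].encode("utf-8")).hexdigest()}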