import datasets

_DESCRIPTION = "lm-polygraph wrapper for xsum dataset"
_DATA_DIRECTORY = "."
VERSION = datasets.Version("0.0.1")

_CONFIG = {
    "dataset": "xsum",
    "splits": ["train", "validation", "test"],
    "input_column": "document",
    "output_column": "summary",
    "prompt": "Here's the text and its short one-sentence summary.\n\nText:\n{text}\n\nSummary (one sentence):\n",
}


def _prepare_dataset(dataset):
    # Extract the raw input/output columns and wrap each input in the prompt template.
    x, y = dataset[_CONFIG["input_column"]], dataset[_CONFIG["output_column"]]
    if _CONFIG.get("prompt"):
        for i in range(len(x)):
            x[i] = _CONFIG["prompt"].format(text=x[i])
    return x, y


class PolygraphXsum(datasets.GeneratorBasedBuilder):
    """lm-polygraph wrapper for xsum dataset"""

    def _info(self):
        features = datasets.Features(
            {
                "input": datasets.Value("string"),
                "output": datasets.Value("string"),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            version=VERSION,
        )

    def _split_generators(self, dl_manager):
        # Load the upstream xsum dataset, convert each split into (input, output)
        # pairs, and cache the converted splits to disk via the download manager.
        dataset = datasets.load_dataset(_CONFIG["dataset"], trust_remote_code=True)

        def download_custom_dataset(src_url: str, dst_path: str):
            split = src_url
            x, y = _prepare_dataset(dataset[split])
            result_dataset = datasets.Dataset.from_dict({"input": x, "output": y})
            result_dataset.save_to_disk(dst_path)

        downloaded_files = dl_manager.download_custom(
            {split: split for split in _CONFIG["splits"]}, download_custom_dataset
        )

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "filepath": downloaded_files["train"],
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "filepath": downloaded_files["validation"],
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "filepath": downloaded_files["test"],
                },
            ),
        ]

    def _generate_examples(self, filepath):
        # Reload the converted split from disk and yield one example per row.
        dataset = datasets.Dataset.load_from_disk(filepath)
        for i in range(len(dataset)):
            yield i, dataset[i]
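

# ---------------------------------------------------------------------------
# Minimal usage sketch (assumption-laden, not part of the lm-polygraph API):
# it presumes this file is saved locally as polygraph_xsum.py (an illustrative
# name) and that the installed `datasets` release still supports script-based
# loaders with trust_remote_code (the 2.16+ / 2.x series, matching the call in
# _split_generators above); newer 3.x releases dropped loading scripts.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Point load_dataset at this script to build the wrapped xsum splits.
    ds = datasets.load_dataset("./polygraph_xsum.py", trust_remote_code=True)
    # Each split exposes prompt-formatted inputs and reference summaries.
    print(ds["test"][0]["input"][:200])  # document wrapped in the prompt template
    print(ds["test"][0]["output"])       # reference one-sentence summary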