from datasets import load_dataset, DatasetDict

dataset_config = {
    "LOADING_SCRIPT_FILES": "policies.py",
    "CONFIG_NAME": "plain_text",
    "DATA_DIR": "data",
    "CACHE_DIR": "cache_policies",
}

ds = load_dataset(
    dataset_config["LOADING_SCRIPT_FILES"],
    dataset_config["CONFIG_NAME"],
    data_dir=dataset_config["DATA_DIR"],
    cache_dir=dataset_config["CACHE_DIR"],
)

# 90% train, 10% test + validation
train_testvalid = ds["train"].train_test_split(shuffle=True, test_size=0.1)

# Split the 10% test + validation in half: half test, half validation
test_valid = train_testvalid["test"].train_test_split(test_size=0.5)

# Gather everything into a single DatasetDict
ds = DatasetDict({
    "train": train_testvalid["train"],
    "test": test_valid["test"],
    "val": test_valid["train"],
})

print(ds)
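
As a quick sanity check, you can confirm the resulting proportions come out to roughly 90/5/5. This is a minimal sketch assuming `ds` is the `DatasetDict` built above; the exact counts depend on your data and on rounding in `train_test_split`.

# Verify the split sizes (hypothetical check, not part of the original pipeline)
total = sum(len(split) for split in ds.values())
for name, split in ds.items():
    print(f"{name}: {len(split)} examples ({len(split) / total:.2%})")

Note that `train_test_split(shuffle=True)` shuffles randomly on each run; if you need reproducible splits, you can pass a fixed `seed` argument to both calls.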