# TRIP/TRIP.py: Hugging Face `datasets` loading script for the TRIP dataset.

import json

import datasets

_CITATION = """\
@misc{storks2021tiered,
title={Tiered Reasoning for Intuitive Physics: Toward Verifiable Commonsense Language Understanding},
author={Shane Storks and Qiaozi Gao and Yichi Zhang and Joyce Chai},
year={2021},
booktitle={Findings of the Association for Computational Linguistics: EMNLP 2021},
location={Punta Cana, Dominican Republic},
publisher={Association for Computational Linguistics},
}
"""
_DESCRIPTION = """\
We introduce Tiered Reasoning for Intuitive Physics (TRIP), a novel commonsense reasoning dataset with dense annotations that enable multi-tiered evaluation of machines’ reasoning process.
"""

_HOMEPAGE = "https://huggingface.co/datasets/sled-umich/TRIP"


class TRIP(datasets.GeneratorBasedBuilder):
    """Builder for the Tiered Reasoning for Intuitive Physics (TRIP) dataset."""

    VERSION = datasets.Version("1.0.1")

    def _info(self):
        # In a `datasets.Features` dict, a Python list `[X]` declares a
        # variable-length list of feature `X`, so `[[datasets.Value("int32")]]`
        # is a list of lists of integers.
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "example_id": datasets.Value("string"),
                    "length": datasets.Value("int32"),
                    "label": datasets.Value("int32"),
                    "breakpoint": datasets.Value("int32"),
                    "confl_sents": [datasets.Value("int32")],
                    "confl_pairs": [[datasets.Value("int32")]],
                    "stories": [{
                        "story_id": datasets.Value("int32"),
                        "worker_id": datasets.Value("string"),
                        "type": datasets.Value("string"),
                        "idx": datasets.Value("int32"),
                        "aug": datasets.Value("bool"),
                        "actor": datasets.Value("string"),
                        "location": datasets.Value("string"),
                        "objects": datasets.Value("string"),
                        "sentences": datasets.features.Sequence(datasets.Value("string")),
                        "length": datasets.Value("int32"),
                        "example_id": datasets.Value("string"),
                        "plausible": datasets.Value("bool"),
                        "breakpoint": datasets.Value("int32"),
                        "confl_sents": datasets.features.Sequence(datasets.Value("int32")),
                        "confl_pairs": [[datasets.Value("int32")]],
                        # Dense physical-state annotations, one feature per
                        # attribute; each record pairs an entity name with an
                        # integer state code.
                        "state-h_location": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-conscious": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-wearing": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-h_wet": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-hygiene": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-location": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-exist": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-clean": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-power": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-functional": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-pieces": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-wet": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-open": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-temperature": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-solid": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-contain": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-running": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-moveable": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-mixed": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                        "state-edible": [[{"entity": datasets.Value("string"), "num": datasets.Value("int32")}]],
                    }],
                }
            ),
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """Returns one SplitGenerator per Cloze/Order train/dev/test file."""
        splits = ["ClozeDev", "ClozeTest", "ClozeTrain", "OrderDev", "OrderTest", "OrderTrain"]
        # The trip-<split>.jsonl files live alongside this script in the dataset
        # repository; the download manager resolves these relative paths.
        data_roots = dl_manager.download_and_extract({k: f"trip-{k}.jsonl" for k in splits})
        return [
            datasets.SplitGenerator(
                name=split,
                gen_kwargs={
                    "filepath": data_roots[split],
                },
            )
            for split in splits
        ]

    def _generate_examples(self, filepath):
        """Yields (key, example) pairs, one JSON object per line of the file."""
        with open(filepath, encoding="utf-8") as f:
            for i, line in enumerate(f):
                yield i, json.loads(line)
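
# Usage sketch (not part of the loading script itself): a minimal example assuming
# this script and the trip-*.jsonl files are available in the sled-umich/TRIP
# dataset repository on the Hugging Face Hub. The split name comes from
# `_split_generators` above and the field names from the schema declared in `_info`.
#
#     from datasets import load_dataset
#
#     ds = load_dataset("sled-umich/TRIP", split="OrderTrain")
#     example = ds[0]
#     print(example["example_id"], example["label"])
#     print(example["stories"][0]["sentences"])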