Datasets:
Languages:
English
Multilinguality:
monolingual
Size Categories:
10K<n<100K
Annotations Creators:
expert-generated
License:
cc-by-4.0
# coding=utf-8 | |
# Copyright 2022 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors. | |
# | |
# Licensed under the Apache License, Version 2.0 (the "License"); | |
# you may not use this file except in compliance with the License. | |
# You may obtain a copy of the License at | |
# | |
# http://www.apache.org/licenses/LICENSE-2.0 | |
# | |
# Unless required by applicable law or agreed to in writing, software | |
# distributed under the License is distributed on an "AS IS" BASIS, | |
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
# See the License for the specific language governing permissions and | |
# limitations under the License. | |
# Lint as: python3 | |
"""Openpi V2: A Dataset for tracking state changes in prcedural text by using an unrestricted library""" | |
import json | |
import os | |
import sys | |
import textwrap | |
import numpy as np | |
import datasets | |
logger = datasets.logging.get_logger(__name__) | |
_OPENPI_V2_CITATION = """\ | |
@inproceedings{ | |
title={{OPENPI V2}: } | |
author={} | |
note={} | |
year={2022} | |
} | |
""" | |
_OPENPI_V2_DESCRIPTION = """\ | |
TEMPORARY DESCRIPTION | |
""" | |
_LICENSE = "CC BY 4.0" | |
_VERSION = "1.0.0" | |
_HOMEPAGE = "https://allenai.org/data/openpi" | |
_URL = "https://huggingface.co/datasets/abhinavk/openpi_v2/resolve/main/data/" | |
_URLS = {"train": _URL + "train-data.json", | |
"dev": _URL + "dev-data.json", | |
"test": _URL + "test-data.json"} | |
class OpenpiConfig(datasets.BuilderConfig):
    """BuilderConfig for Openpi V2.

    Args:
        features: `datasets.Features` schema of the examples of this config.
        data_url: mapping from split name to the URL of its JSON file.
        citation: BibTeX citation string for this config.
        url: homepage URL for this config.
        process_label: callable applied to raw labels; identity by default.
        **kwargs: forwarded to `datasets.BuilderConfig` (name, description, ...).
    """

    def __init__(self, features, data_url, citation, url, process_label=lambda x: x, **kwargs):
        # Version is pinned to the module-level release version.
        super().__init__(version=datasets.Version(_VERSION), **kwargs)
        self.features = features
        self.data_url = data_url
        self.citation = citation
        self.url = url
        self.process_label = process_label
class OpenpiV2(datasets.GeneratorBasedBuilder):
    """Dataset builder for OpenPI V2 (tracking state changes in procedural text).

    Configs:
      * ``openpi_text`` -- the raw annotations (goal, steps, topics, image urls
        and per-state entity/attribute answers).
      * ``Task 1`` / ``Task 3`` / ``Task 4`` / ``Task 7`` -- task-specific views
        derived on the fly from the same per-split JSON files.
    """

    # Shared placeholder citation template; the original repeated this literal
    # in every config.
    _TASK_CITATION = textwrap.dedent(
        """\
        @inproceedings{
        title={},
        author={},
        booktitle={},
        year={}
        }"""
    )

    BUILDER_CONFIGS = [
        OpenpiConfig(
            # FIX: the original description for this config was empty.
            name="openpi_text",
            description=textwrap.dedent(
                """\
                Full OpenPI V2 annotations: goal, steps, topics, image urls and
                per-state entity/attribute answers."""
            ),
            features=datasets.Features(
                {
                    "goal": datasets.Value("string"),
                    "steps": [datasets.Value("string")],
                    "topics": datasets.Value("string"),
                    "image_urls": [datasets.Value("string")],
                    "states": [
                        {
                            "answers_openpiv1_metadata": {
                                "entity": datasets.Value("string"),
                                "attribute": datasets.Value("string"),
                                "answers": [datasets.Value("string")],
                                "modality": [datasets.Value("string")],
                            },
                            "entity": datasets.Value("string"),
                            "attribute": datasets.Value("string"),
                            "answers": [datasets.Value("string")],
                            "saliency": datasets.Value("float32"),
                        }
                    ],
                }
            ),
            data_url=_URLS,
            citation=_TASK_CITATION,
            url=_HOMEPAGE,
        ),
        OpenpiConfig(
            name="Task 1",
            description=textwrap.dedent(
                """\
                Given paragraph (e.g., with 5 steps), identify entities that change
                (challenge: implicit entities, some explicit entities that don’t change)."""
            ),
            features=datasets.Features(
                {
                    "steps": [datasets.Value("string")],
                    "entity_changes": [[datasets.Value("string")]],
                }
            ),
            data_url=_URLS,
            citation=_TASK_CITATION,
            url=_HOMEPAGE,
        ),
        OpenpiConfig(
            name="Task 3",
            description=textwrap.dedent(
                """\
                Given paragraph (e.g., with 5 steps), identify the attributes of entity that change
                (challenge: implicit entities, attributes & many combinations)."""
            ),
            features=datasets.Features(
                {
                    "steps": [datasets.Value("string")],
                    "attr_entity_changes": [datasets.Value("string")],
                }
            ),
            data_url=_URLS,
            citation=_TASK_CITATION,
            url=_HOMEPAGE,
        ),
        OpenpiConfig(
            name="Task 4",
            description=textwrap.dedent(
                """\
                Task 4: Given paragraph & an entity, identify the sequence of attribute value changes
                (challenge: implicit attributes)."""
            ),
            features=datasets.Features(
                {
                    "steps": [datasets.Value("string")],
                    "entity": datasets.Value("string"),
                    "attribute_changes": [[datasets.Value("string")]],
                }
            ),
            data_url=_URLS,
            citation=_TASK_CITATION,
            url=_HOMEPAGE,
        ),
        OpenpiConfig(
            name="Task 7",
            description=textwrap.dedent(
                """\
                Task 7: Given image url, identify the visual attributes of entity and
                non-visual attributes of entity that change."""
            ),
            features=datasets.Features(
                {
                    "image_url": datasets.Value("string"),
                    "visual_attr": [datasets.Value("string")],
                    "non_visual_attr": [datasets.Value("string")],
                }
            ),
            data_url=_URLS,
            citation=_TASK_CITATION,
            url=_HOMEPAGE,
        ),
    ]

    def _info(self):
        """Return the `DatasetInfo` for the selected config."""
        return datasets.DatasetInfo(
            description=_OPENPI_V2_DESCRIPTION,
            features=self.config.features,
            supervised_keys=None,
            homepage=self.config.url,
            citation=self.config.citation + "\n" + _OPENPI_V2_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download the per-split JSON files and declare train/dev/test splits."""
        downloaded_files = dl_manager.download_and_extract(self.config.data_url)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": downloaded_files["train"]},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={"filepath": downloaded_files["dev"]},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={"filepath": downloaded_files["test"]},
            ),
        ]

    # FIX: the three helpers below were defined without `self` and only worked
    # when called through the class; `@staticmethod` makes instance calls safe
    # too, while keeping `OpenpiV2.helper(...)` call sites working.
    @staticmethod
    def change_occur(dataset):
        """Return True if any per-step answer list in `dataset` is non-empty.

        `dataset` is a state's "answers" field: one list of answers per step.
        """
        return any(len(step) > 0 for step in dataset)

    @staticmethod
    def find_change(key, dataset):
        """For every state in `dataset` with at least one answer, split the
        " | "-separated `state[key]` string into a list of values."""
        res = []
        for state in dataset:
            if OpenpiV2.change_occur(state["answers"]):
                # split() already returns a fresh list; no manual copy needed.
                res.append(state[key].split(" | "))
        return res

    @staticmethod
    def find_attr_entity_change(dataset):
        """Render "[attribute] of [entity] changed" for every state in
        `dataset` with at least one answer."""
        attr_change = []
        for state in dataset:
            if OpenpiV2.change_occur(state["answers"]):
                attr_change.append(
                    "[" + state["attribute"] + "] of [" + state["entity"] + "] changed"
                )
        return attr_change

    def _generate_examples(self, filepath):
        """Yield (key, example) pairs for the selected config.

        BUG FIX: keys must be unique within a split. The original Task 4 and
        Task 7 branches yielded several examples per record, all under the same
        `int(id_)` key, which makes `datasets` raise DuplicatedKeysError. Those
        branches now use "<id>_<index>" keys.
        """
        logger.info("generating examples from = %s", filepath)
        with open(filepath) as f:
            dataset = json.load(f)
        name = self.config.name
        if name == "openpi_text":
            yield from self._text_examples(dataset)
        elif name == "Task 1":
            yield from self._task1_examples(dataset)
        elif name == "Task 3":
            yield from self._task3_examples(dataset)
        elif name == "Task 4":
            yield from self._task4_examples(dataset)
        elif name == "Task 7":
            yield from self._task7_examples(dataset)

    @staticmethod
    def _text_examples(dataset):
        """openpi_text: pass the raw records through unchanged."""
        for id_, record in dataset.items():
            yield int(id_), {
                "goal": record["goal"],
                "steps": record["steps"],
                "topics": record["topics"],
                "image_urls": record["image_urls"],
                "states": record["states"],
            }

    @staticmethod
    def _task1_examples(dataset):
        """Task 1: per record, the entity lists of states that change."""
        for id_, record in dataset.items():
            yield int(id_), {
                "steps": record["steps"],
                "entity_changes": OpenpiV2.find_change("entity", record["states"]),
            }

    @staticmethod
    def _task3_examples(dataset):
        """Task 3: per record, "[attribute] of [entity] changed" strings."""
        for id_, record in dataset.items():
            yield int(id_), {
                "steps": record["steps"],
                "attr_entity_changes": OpenpiV2.find_attr_entity_change(record["states"]),
            }

    @staticmethod
    def _task4_examples(dataset):
        """Task 4: per entity, the attribute lists of its changing states."""
        for id_, record in dataset.items():
            steps_ar = record["steps"]
            states = record["states"]
            # BUG FIX: an entity that occurs in several states used to be
            # yielded once per occurrence with identical content; emit each
            # entity once per record.
            seen = set()
            for state in states:
                for entity in state["entity"].split(" | "):
                    if entity in seen:
                        continue
                    seen.add(entity)
                    attribute_changes = []
                    for other in states:
                        if entity not in other["entity"].split(" | "):
                            continue
                        if OpenpiV2.change_occur(other["answers"]):
                            attribute_changes.append(other["attribute"].split(" | "))
                    # Unique key per (record, entity) -- see _generate_examples.
                    yield "{}_{}".format(id_, len(seen) - 1), {
                        "steps": steps_ar,
                        "entity": entity,
                        "attribute_changes": attribute_changes,
                    }

    @staticmethod
    def _task7_examples(dataset):
        """Task 7: per image, attributes that change visually / non-visually."""
        for id_, record in dataset.items():
            for i, image_url in enumerate(record["image_urls"]):
                visual_attr = []
                non_visual_attr = []
                for state in record["states"]:
                    # Only states with at least one answer at this step count.
                    if not state["answers"][i]:
                        continue
                    modalities = state["answers_openpiv1_metadata"]["modality"][i].split(" | ")
                    change_str = (
                        "[" + state["attribute"] + "] of [" + state["entity"] + "] changed"
                    )
                    if "with_image" in modalities:
                        visual_attr.append(change_str)
                    if "without_image" in modalities:
                        non_visual_attr.append(change_str)
                # Unique key per (record, image) -- see _generate_examples.
                yield "{}_{}".format(id_, i), {
                    "image_url": image_url,
                    "visual_attr": visual_attr,
                    "non_visual_attr": non_visual_attr,
                }