|
|
|
|
|
""" |
|
SyntaxGym dataset as used in Hu et al. (2020). |
|
""" |
|
|
|
|
|
from copy import deepcopy |
|
import json |
|
from pathlib import Path |
|
import re |
|
from typing import List |
|
|
|
import datasets |
|
|
|
from .prediction import Prediction |
|
|
|
|
|
# BibTeX entry for the SyntaxGym paper; appended to every suite's citation
# string in `SyntaxGym._info`.
_CITATION = """
@inproceedings{Hu:et-al:2020,
author = {Hu, Jennifer and Gauthier, Jon and Qian, Peng and Wilcox, Ethan and Levy, Roger},
title = {A systematic assessment of syntactic generalization in neural language models},
booktitle = {Proceedings of the Association of Computational Linguistics},
year = {2020}
}
"""

# Shared description appended to each per-suite config description.
# NOTE(review): currently empty — presumably to be filled in later.
_DESCRIPTION = ""

# Project homepage and source-data repository.
_PROJECT_URL = "https://syntaxgym.org"
_DOWNLOAD_URL = "https://github.com/cpllab/syntactic-generalization"
|
|
|
|
|
# Load every test-suite JSON once at import time, keyed by suite name.
# NOTE(review): the path is relative to the current working directory —
# confirm callers always run from the package root.
SUITE_JSONS = {}
for suite_path in Path("test_suites").glob("*.json"):
    with suite_path.open() as handle:
        suite = json.load(handle)
    SUITE_JSONS[suite["meta"]["name"]] = suite
|
|
|
|
|
class SyntaxGymSuiteConfig(datasets.BuilderConfig):
    """BuilderConfig wrapping a single SyntaxGym test suite."""

    def __init__(self, suite_json, version=datasets.Version("1.0.0"), **kwargs):
        """Create a config named after the suite.

        Args:
            suite_json: parsed suite JSON; must contain "meta" (with a
                "name" entry) and "region_meta" keys.
            version: dataset version for this suite.
        """
        self.meta = suite_json["meta"]
        suite_name = self.meta["name"]
        super().__init__(
            name=suite_name,
            description=f"SyntaxGym test suite {suite_name}.\n" + _DESCRIPTION,
            version=version,
            **kwargs)

        # Region labels for this suite, in region-number order.
        self.features = list(suite_json["region_meta"].values())
|
|
|
|
|
class SyntaxGym(datasets.GeneratorBasedBuilder):
    """Dataset builder exposing each SyntaxGym test suite as its own config."""

    BUILDER_CONFIGS = [SyntaxGymSuiteConfig(suite_json)
                       for suite_json in SUITE_JSONS.values()]

    def _info(self):
        """Describe the item/condition/region structure of a suite."""
        condition_spec = {
            "condition_name": datasets.Value("string"),
            "content": datasets.Value("string"),
            "regions": datasets.Sequence({
                "region_number": datasets.Value("int32"),
                "content": datasets.Value("string")
            })
        }

        features = {
            "item_number": datasets.Value("string"),
            "conditions": datasets.Sequence(condition_spec)
        }

        citation = ""
        if self.config.meta["reference"]:
            # BUG FIX: the reference lives on the config's metadata
            # (`self.config.meta`), not on the builder — the original read
            # `self.meta`, which does not exist on this class.
            citation = f"Test suite citation: {self.config.meta['reference']}\n"
        citation += f"SyntaxGym citation:\n{_CITATION}"

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(features),
            homepage=_PROJECT_URL,
            citation=citation,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager) -> List[datasets.SplitGenerator]:
        # Suites are bundled with the package, so there is nothing to
        # download; expose a single TEST split keyed by the suite name.
        return [datasets.SplitGenerator(name=datasets.Split.TEST,
                                        gen_kwargs={"name": self.config.name})]

    def _generate_examples(self, name):
        """Yield ``(item_number, item)`` pairs for the named suite.

        Each condition's full ``content`` string is derived by joining its
        non-empty region contents and collapsing whitespace before commas.
        """
        suite_json = SUITE_JSONS[name]

        for item in suite_json["items"]:
            # Work on a copy so repeated generation never mutates the
            # module-level SUITE_JSONS cache in place.
            item = deepcopy(item)
            for cond in item["conditions"]:
                cond["content"] = " ".join([region["content"].lstrip()
                                            for region in cond["regions"]
                                            if region["content"].strip() != ""])
                cond["content"] = re.sub(r"\s+,", ",", cond["content"])

            yield item["item_number"], item
|
|
|
|
|
class SyntaxGymMetric(datasets.Metric): |
|
""" |
|
SyntaxGym prediction evaluation metric. |
|
""" |
|
|
|
def __init__(self, *args, **kwargs): |
|
super().__init__(*args, **kwargs) |
|
self.suite = SUITE_JSONS[self.config_name] |
|
self.predictions = [ |
|
Prediction(idx, p["formula"], "sum") |
|
for idx, p in enumerate(self.suite["predictions"]) |
|
] |
|
|
|
def _info(self): |
|
features = datasets.Features({ |
|
"conditions": datasets.Sequence({ |
|
"condition_name": datasets.Value("string"), |
|
"regions": datasets.Sequence({ |
|
"region_number": datasets.Value("int32"), |
|
"metric_value": { |
|
"sum": datasets.Value("float32") |
|
}, |
|
}), |
|
}) |
|
}) |
|
return datasets.MetricInfo( |
|
description="TODO", |
|
citation=_CITATION, |
|
inputs_description="TODO", |
|
features=features, |
|
) |
|
|
|
def _compute(self, region_surprisals): |
|
suite_with_results = deepcopy(self.suite) |
|
|