# -*- coding: utf-8 -*-
"""LSOIE: A Large-Scale Dataset for Supervised Open Information Extraction."""
import os
import datasets
from datasets.info import SupervisedKeysData
from zipfile import ZipFile
logger = datasets.logging.get_logger(__name__)
_CITATION = """\
@article{lsoie-2021,
title={{LSOIE}: A Large-Scale Dataset for Supervised Open Information Extraction},
author={{Solawetz}, Jacob and {Larson}, Stefan},
journal={arXiv preprint arXiv:2101.11177},
year={2021},
url="https://arxiv.org/pdf/2101.11177.pdf"
}
"""
_DESCRIPTION = """
The Large-Scale Open Information Extraction Dataset (LSOIE) is a dataset 20
times larger than the next largest human-annotated Open Information Extraction
(OIE) dataset. LSOIE is built upon the QA-SRL 2.0 dataset.
"""
_URL = "https://github.com/Jacobsolawetz/large-scale-oie/"
_URLS = {
"zip": _URL+"raw/master/dataset_creation/lsoie_data/lsoie_data.zip"
}
_ARCHIVE_FILES = [
"lsoie_science_train.conll",
"lsoie_science_dev.conll",
"lsoie_science_test.conll",
"lsoie_wiki_train.conll",
"lsoie_wiki_dev.conll",
"lsoie_wiki_test.conll",
]
class LsoieConfig(datasets.BuilderConfig):
    """BuilderConfig for LSOIE."""

    def __init__(self, subset="wiki", **kwargs):
        """BuilderConfig for LSOIE.

        Args:
            subset: str - either "wiki" or "science"
            **kwargs: keyword arguments forwarded to super.
        """
        super(LsoieConfig, self).__init__(**kwargs)
        self.subset = subset
class Lsoie(datasets.GeneratorBasedBuilder):
"""LSOIE: A Large-Scale Dataset for Supervised Open Information Extraction"""
BUILDER_CONFIGS = [
LsoieConfig(
name="wiki",
description="LSOIE dataset from wikipedia and wikinews",
subset="wiki",
),
LsoieConfig(
name="sci",
description="LSOIE dataset build over scientific domain",
subset="science",
),
]
DEFAULT_CONFIG_NAME = "wiki"
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features(
{
"word_ids": datasets.Sequence(datasets.Value("int16")),
"words": datasets.Sequence(datasets.Value("string")),
"pred": datasets.Value("string"),
"pred_ids": datasets.Sequence(datasets.Value("int16")),
"head_pred_id": datasets.Value("int16"),
"sent_id": datasets.Value("int16"),
"run_id": datasets.Value("int16"),
"label": datasets.Sequence(datasets.Value("string")),
}
),
supervised_keys=SupervisedKeysData(input="word_ids",output="label"),
homepage=_URL,
citation=_CITATION,
#there is no default task for open information extraction yet
#task_templates=[
# OpenInformationExtraction(
# question_column="question", context_column="context", answers_column="answers"
# )
#],
)
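    # For orientation, a single generated example looks roughly like this
    # (values are illustrative, not taken from the actual dataset):
    #
    #     {
    #         "word_ids": [0, 1, 2, 3],
    #         "words": ["The", "cat", "sat", "down"],
    #         "pred": "sat",
    #         "pred_ids": [2],
    #         "head_pred_id": 2,
    #         "sent_id": 0,
    #         "run_id": 0,
    #         "label": ["A0-B", "A0-I", "P-B", "A1-B"],
    #     }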
def _split_generators(self, dl_manager):
downloaded_archive = dl_manager.download(_URLS)['zip']
#name_pre=os.path.join("lsoie_data","lsoie_")+self.config.subset+"_"
name_pre="lsoie_"+self.config.subset+"_"
return [
datasets.SplitGenerator(name=datasets.Split.TRAIN,
gen_kwargs={
"archive_path": downloaded_archive,
"file_name": name_pre+"train.conll",
}),
datasets.SplitGenerator(name=datasets.Split.VALIDATION,
gen_kwargs={
"archive_path": downloaded_archive,
"file_name": name_pre+"dev.conll",
}),
datasets.SplitGenerator(name=datasets.Split.TEST,
gen_kwargs={
"archive_path": downloaded_archive,
"file_name": name_pre+"test.conll",
}),
]
    def _generate_examples(self, archive_path, file_name):
        """This function yields the samples in a raw format."""
        logger.info("generating examples from archive: {}".format(archive_path))
        columns = {
            'word_ids': int,
            'words': str,
            'pred': str,
            # pred_ids is stored as a bracketed list such as "[3, 4]"; parse it into
            # a list of ints so it matches the Sequence(int16) feature declared in _info
            'pred_ids': lambda x: [int(num) for num in x.strip('[]').split(',')],
            'head_pred_id': int,
            'sent_id': int,
            'run_id': int,
            'label': str,
        }
        list_columns = ["word_ids", "words", "label"]
        sep = "\t"
        key = 0
        sentence = dict()
        for column in list_columns:
            sentence[column] = []
        with ZipFile(archive_path) as zipfile:
            with zipfile.open('lsoie_data/' + file_name, mode='r') as file:
                for line in file:
                    line = line.decode("utf-8").strip('\n').split(sep=sep)
                    # An empty line marks the end of a sentence.
                    if line[0] == '':
                        if sentence["words"]:
                            yield key, sentence
                            key += 1
                        for column in list_columns:
                            sentence[column] = []
                        continue
                    for column, val in zip(columns.keys(), line):
                        val = columns[column](val)
                        if column in list_columns:
                            sentence[column].append(val)
                        else:
                            sentence[column] = val
                # Yield the last sentence in case the file does not end with an empty line.
                if sentence["words"]:
                    yield key, sentence
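

# Minimal local smoke test (a sketch, not part of the loading script itself;
# assumes a `datasets` version that can load a local dataset script directly):
if __name__ == "__main__":
    from datasets import load_dataset

    dataset = load_dataset(__file__, name="wiki", split="train")
    print(dataset[0])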