""" |
|
TL;DR: The datasets for temporal knowledge graph reasoning task. |
|
|
|
[[Github]](https://github.com/LinXueyuanStdio/TFLEX) |
|
[[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L) |
|
[[arXiv]](https://arxiv.org/abs/2205.14307) |
|
|
|
- Built over ICEWS and GDELT, which are widly used benchmarks in TKGC. |
|
- First introduced in paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph" |
|
- Please refer to the original paper for more details. |
|
|
|
|
|
""" |
|
import json
import os
from dataclasses import dataclass
from typing import Dict, List, Optional, Set, TypedDict

import datasets

_CITATION = """\
@inproceedings{xueyuan2023tflex,
  title={TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph},
  author={Lin Xueyuan and Haihong E and Chengjin Xu and Gengxian Zhou and Haoran Luo and Tianyi Hu and Fenglong Su and Ningyuan Li and Mingzhi Sun},
  booktitle={Thirty-seventh Conference on Neural Information Processing Systems},
  year={2023},
  url={https://openreview.net/forum?id=oaGdsgB18L}
}\
"""
|
_DESCRIPTION = """\
TL;DR: The datasets for the temporal knowledge graph reasoning task.

[[Github]](https://github.com/LinXueyuanStdio/TFLEX)
[[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
[[arXiv]](https://arxiv.org/abs/2205.14307)

- Built over ICEWS and GDELT, which are widely used benchmarks in TKGC (temporal knowledge graph completion).
- First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
- Please refer to the original paper for more details.
"""
|
_HOMEPAGE = "https://github.com/LinXueyuanStdio/TFLEX"

_LICENSE = "[Apache License 2.0](https://github.com/LinXueyuanStdio/TFLEX/blob/main/LICENSE)"
|
# Argument signature of each query structure. In the names: Pe projects to an
# entity answer and Pt to a timestamp answer; "i" = intersection, "u" = union,
# "N" = negation; "a"/"b"/"n" = after/before/next temporal operators; "s"/"o"
# mark whether the nested sub-query sits in the subject or object slot.
query_name_to_args: Dict[str, List[str]] = {
    # 1-hop projections
    "Pe": ['e1', 'r1', 't1'],
    "Pt": ['e1', 'r1', 'e2'],
    # multi-hop entity projections
    "Pe2": ['e1', 'r1', 't1', 'r2', 't2'],
    "Pe3": ['e1', 'r1', 't1', 'r2', 't2', 'r3', 't3'],
    # timestamp projections and temporal operators
    "aPt": ['s', 'r', 'o'],
    "bPt": ['s', 'r', 'o'],
    "Pt_sPe": ['e1', 'r1', 't1', 'r2', 'e2'],
    "Pt_oPe": ['e1', 'r1', 'e2', 'r2', 't1'],
    "Pe_Pt": ['e1', 'r1', 'e2', 'r2', 'e3'],
    "Pe_aPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
    "Pe_bPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
    "Pe_nPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
    "Pt_sPe_Pt": ['s1', 'r1', 's2', 'r2', 'o1', 'r3', 'o2'],
    "Pt_oPe_Pt": ['s1', 'r1', 's2', 'r2', 's3', 'r3', 'o1'],
    # intersections
    "e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
    "e3i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
    "t2i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
    "t3i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
    # projection + intersection combinations
    "e2i_Pe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
    "Pe_e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
    "Pt_se2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 'e3'],
    "Pt_oe2i": ['e1', 'r1', 'e2', 'r2', 't1', 'e3', 'r3', 't2'],
    "t2i_Pe": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
    "Pe_t2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
    "Pe_at2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
    "Pe_bt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
    "Pe_nt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
    "between": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
    # entity queries with negation
    "e2i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
    "e3i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
    "Pe_e2i_Pe_NPe": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
    "e2i_NPe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
    "e2i_PeN": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
    # timestamp queries with negation
    "t2i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
    "t3i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
    "Pe_t2i_PtPe_NPt": ['e1', 'r1', 'e2', 'r2', 't2', 'r3', 'e3', 'e4', 'r4', 'e5'],
    "t2i_NPt": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
    "t2i_PtN": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
    # unions
    "e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
    "Pe_e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
    "t2u": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
    "Pe_t2u": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
}
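
# Convenience sketch (not part of the original builder): pair the flat id list
# stored in a sample's "query" field with the named slots above. The example
# ids in the docstring are illustrative, not taken from the real data.
def parse_query(query_name: str, query: List[int]) -> Dict[str, int]:
    """parse_query("Pe", [13, 2, 170]) -> {'e1': 13, 'r1': 2, 't1': 170}."""
    args = query_name_to_args[query_name]
    if len(args) != len(query):
        raise ValueError(f"{query_name} expects {len(args)} ids, got {len(query)}")
    return dict(zip(args, query))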
|
# Executable-style definition of each query structure, expressed with the
# atomic operators Pe, Pt, And/And3, Or, Not, TimeAnd/TimeAnd3, TimeOr,
# TimeNot, and the temporal shifts after/before/next.
query_structures: Dict[str, str] = {
    # 1-hop projections
    "Pe": "def Pe(e1, r1, t1): return Pe(e1, r1, t1)",
    "Pt": "def Pt(e1, r1, e2): return Pt(e1, r1, e2)",
    # multi-hop entity projections
    "Pe2": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)",
    "Pe3": "def Pe3(e1, r1, t1, r2, t2, r3, t3): return Pe(Pe(Pe(e1, r1, t1), r2, t2), r3, t3)",
    # timestamp projections and temporal operators.
    # Pt_lPe/Pt_rPe duplicate Pt_sPe/Pt_oPe below (left/right vs.
    # subject/object naming); train_query_structures refers to the l/r names.
    "aPt": "def aPt(s, r, o): return after(Pt(s, r, o))",
    "bPt": "def bPt(s, r, o): return before(Pt(s, r, o))",
    "Pt_lPe": "def Pt_lPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",
    "Pt_rPe": "def Pt_rPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",
    "Pt_sPe": "def Pt_sPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",
    "Pt_oPe": "def Pt_oPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",
    "Pe_Pt": "def Pe_Pt(e1, r1, e2, r2, e3): return Pe(e1, r1, Pt(e2, r2, e3))",
    "Pe_aPt": "def Pe_aPt(e1, r1, e2, r2, e3): return Pe(e1, r1, after(Pt(e2, r2, e3)))",
    "Pe_bPt": "def Pe_bPt(e1, r1, e2, r2, e3): return Pe(e1, r1, before(Pt(e2, r2, e3)))",
    "Pe_nPt": "def Pe_nPt(e1, r1, e2, r2, e3): return Pe(e1, r1, next(Pt(e2, r2, e3)))",
    "Pt_sPe_Pt": "def Pt_sPe_Pt(s1, r1, s2, r2, o1, r3, o2): return Pt(Pe(s1, r1, Pt(s2, r2, o1)), r3, o2)",
    "Pt_oPe_Pt": "def Pt_oPe_Pt(s1, r1, s2, r2, s3, r3, o1): return Pt(s1, r1, Pe(s2, r2, Pt(s3, r3, o1)))",
    # intersections
    "e2i": "def e2i(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Pe(e2, r2, t2))",
    "e3i": "def e3i(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Pe(e3, r3, t3))",
    "t2i": "def t2i(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), Pt(e3, r2, e4))",
    "t3i": "def t3i(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), Pt(e5, r3, e6))",
    # projection + intersection combinations.
    # Pt_le2i/Pt_re2i duplicate Pt_se2i/Pt_oe2i, mirroring the aliases above.
    "e2i_Pe": "def e2i_Pe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Pe(e2, r3, t3))",
    "Pe_e2i": "def Pe_e2i(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(e2i(e1, r1, t1, e2, r2, t2), r3, t3)",
    "Pt_le2i": "def Pt_le2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",
    "Pt_re2i": "def Pt_re2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",
    "Pt_se2i": "def Pt_se2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",
    "Pt_oe2i": "def Pt_oe2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",
    "t2i_Pe": "def t2i_Pe(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), Pt(e3, r3, e4))",
    "Pe_t2i": "def Pe_t2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, t2i(e2, r2, e3, e4, r3, e5))",
    "Pe_at2i": "def Pe_at2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, after(t2i(e2, r2, e3, e4, r3, e5)))",
    "Pe_bt2i": "def Pe_bt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, before(t2i(e2, r2, e3, e4, r3, e5)))",
    "Pe_nt2i": "def Pe_nt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, next(t2i(e2, r2, e3, e4, r3, e5)))",
    "between": "def between(e1, r1, e2, e3, r2, e4): return TimeAnd(after(Pt(e1, r1, e2)), before(Pt(e3, r2, e4)))",
    # entity queries with negation
    "e2i_N": "def e2i_N(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2)))",
    "e3i_N": "def e3i_N(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Not(Pe(e3, r3, t3)))",
    "Pe_e2i_Pe_NPe": "def Pe_e2i_Pe_NPe(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2))), r3, t3)",
    "e2i_PeN": "def e2i_PeN(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Not(Pe(e2, r3, t3)))",
    "e2i_NPe": "def e2i_NPe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Not(Pe(Pe(e1, r1, t1), r2, t2)), Pe(e2, r3, t3))",
    # timestamp queries with negation
    "t2i_N": "def t2i_N(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), TimeNot(Pt(e3, r2, e4)))",
    "t3i_N": "def t3i_N(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), TimeNot(Pt(e5, r3, e6)))",
    "Pe_t2i_PtPe_NPt": "def Pe_t2i_PtPe_NPt(e1, r1, e2, r2, t2, r3, e3, e4, r4, e5): return Pe(e1, r1, TimeAnd(Pt(Pe(e2, r2, t2), r3, e3), TimeNot(Pt(e4, r4, e5))))",
    "t2i_PtN": "def t2i_PtN(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), TimeNot(Pt(e3, r3, e4)))",
    "t2i_NPt": "def t2i_NPt(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(TimeNot(Pt(Pe(e1, r1, t1), r2, e2)), Pt(e3, r3, e4))",
    # unions
    "e2u": "def e2u(e1, r1, t1, e2, r2, t2): return Or(Pe(e1, r1, t1), Pe(e2, r2, t2))",
    "Pe_e2u": "def Pe_e2u(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Or(Pe(e1, r1, t1), Pe(e2, r2, t2)), r3, t3)",
    "t2u": "def t2u(e1, r1, e2, e3, r2, e4): return TimeOr(Pt(e1, r1, e2), Pt(e3, r2, e4))",
    "Pe_t2u": "def Pe_t2u(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeOr(Pt(e2, r2, e3), Pt(e4, r3, e5)))",
    # De Morgan rewritings of the unions
    "e2u_DM": "def e2u_DM(e1, r1, t1, e2, r2, t2): return Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2))))",
    "Pe_e2u_DM": "def Pe_e2u_DM(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2)))), r3, t3)",
    "t2u_DM": "def t2u_DM(e1, r1, e2, e3, r2, e4): return TimeNot(TimeAnd(TimeNot(Pt(e1, r1, e2)), TimeNot(Pt(e3, r2, e4))))",
    "Pe_t2u_DM": "def Pe_t2u_DM(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeNot(TimeAnd(TimeNot(Pt(e2, r2, e3)), TimeNot(Pt(e4, r3, e5)))))",
    # DNF rewritings of the unions: each returns the tuple of its disjuncts
    "e2u_DNF": "def e2u_DNF(e1, r1, t1, e2, r2, t2): return Pe(e1, r1, t1), Pe(e2, r2, t2)",
    "Pe_e2u_DNF": "def Pe_e2u_DNF(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Pe(e1, r1, t1), r3, t3), Pe(Pe(e2, r2, t2), r3, t3)",
    "t2u_DNF": "def t2u_DNF(e1, r1, e2, e3, r2, e4): return Pt(e1, r1, e2), Pt(e3, r2, e4)",
    "Pe_t2u_DNF": "def Pe_t2u_DNF(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, Pt(e2, r2, e3)), Pe(e1, r1, Pt(e4, r3, e5))",
}
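
# Interactive sketch (not used by the builder): render a structure as
# "name(args) := body" by combining the two tables above.
def describe(query_name: str) -> str:
    """describe("Pe2") -> 'Pe2(e1, r1, t1, r2, t2) := Pe(Pe(e1, r1, t1), r2, t2)'."""
    args = ", ".join(query_name_to_args.get(query_name, []))
    body = query_structures[query_name].split("return ", 1)[1]
    return f"{query_name}({args}) := {body}"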
|
union_query_structures: List[str] = [
    "e2u", "Pe_e2u",
    "t2u", "Pe_t2u",
]
train_query_structures: List[str] = [
    # entity queries
    "Pe", "Pe2", "Pe3", "e2i", "e3i",
    "e2i_NPe", "e2i_PeN", "Pe_e2i_Pe_NPe", "e2i_N", "e3i_N",
    # time queries
    "Pt", "Pt_lPe", "Pt_rPe", "Pe_Pt", "Pe_aPt", "Pe_bPt", "Pe_nPt",
    "t2i", "t3i", "Pt_le2i", "Pt_re2i", "Pe_t2i", "Pe_at2i", "Pe_bt2i", "Pe_nt2i", "between",
    "t2i_NPt", "t2i_PtN", "Pe_t2i_PtPe_NPt", "t2i_N", "t3i_N",
]
# The test (and valid) splits additionally evaluate structures never seen
# during training, including the unions and their De Morgan rewritings.
test_query_structures: List[str] = train_query_structures + [
    # entity queries
    "e2i_Pe", "Pe_e2i",
    "e2u", "Pe_e2u",
    # time queries
    "t2i_Pe", "Pe_t2i",
    "t2u", "Pe_t2u",
    # union queries with the De Morgan rewriting
    "e2u_DM", "Pe_e2u_DM",
    "t2u_DM", "Pe_t2u_DM",
]
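
# Self-check sketch (call it by hand when editing the tables): every name in
# the train/test lists has a definition in query_structures, while
# query_name_to_args only lists canonical names (e.g. "Pt_sPe" but not its
# alias "Pt_lPe"), which is why lookups against it are guarded with .get()
# in _generate_examples below.
def _check_query_tables() -> None:
    for name in test_query_structures:
        assert name in query_structures, f"missing definition for {name}"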
|
_HOST = "https://huggingface.co/datasets"
_AUTHOR = "linxy"
_DATASET = "ICEWS14"
# One zip archive per query structure, plus "all" bundling every structure.
_URLS = {
    name: f"{_HOST}/{_AUTHOR}/{_DATASET}/resolve/main/zips/{name}.zip?download=true"
    for name in ["all"] + list(query_name_to_args.keys())
}
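
# Typical usage (a sketch; downloads the zips from the Hugging Face Hub):
#
#     from datasets import load_dataset
#
#     ds = load_dataset("linxy/ICEWS14", "Pe")   # a single query structure
#     ds = load_dataset("linxy/ICEWS14", "all")  # every query structure
#     print(ds["train"][0])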
|
class QueryData(TypedDict):
    """Schema of one sample.

    Saved in the train split: query_name, query, answer.
    Saved in the valid/test splits: query_name, query, answer, easy_answer.
    When iterating a dataloader, args and definition are attached as well.

    Note: TypedDict fields cannot carry default values; keys that are absent
    in some splits are typed Optional and filled with None by the builder.
    """
    query_name: str
    query: List[int]
    answer: Set[int]
    easy_answer: Optional[Set[int]]
    args: Optional[List[str]]
    definition: Optional[str]
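
# For example, one "Pe" line of train.jsonl deserializes to something like
# (the ids are illustrative, not taken from the real data):
#
#     {"query_name": "Pe", "query": [13, 2, 170], "answer": [4483]}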
|
@dataclass
class TKGRBuilderConfig(datasets.BuilderConfig):
    """BuilderConfig for TKGR (Temporal Knowledge Graph Reasoning).

    Currently unused by the builder below (which instantiates plain
    datasets.BuilderConfig objects); kept for callers that want to attach a
    query structure name to a config.
    """

    query_structure_name: str = "default"
|
class ICEWS14Dataset(datasets.GeneratorBasedBuilder):
    """Complex reasoning queries over the ICEWS14 temporal knowledge graph (TFLEX)."""

    VERSION = datasets.Version("1.0.0")
|
    # One config per query structure, each described by its definition string.
    # The version literal is repeated here because class-level names (such as
    # VERSION) are not visible inside a class-body comprehension.
    STANDARD_BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name=query_name,
            version=datasets.Version("1.0.0"),
            description=query_structures[query_name],
        )
        for query_name in list(query_name_to_args.keys())
    ]
    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name="all",
            version=VERSION,
            description=f"All types of queries. Train: {train_query_structures}, Valid | Test: {test_query_structures}",
        )
    ] + STANDARD_BUILDER_CONFIGS

    DEFAULT_CONFIG_NAME = "all"
|
    def _info(self):
        # The schema is identical for the "all" config and every
        # per-structure config, so no branching on self.config.name is needed.
        features = datasets.Features(
            {
                "query_name": datasets.Value("string"),
                "definition": datasets.Value("string"),
                "query": datasets.Sequence(feature=datasets.Value("int32")),
                "answer": datasets.Sequence(feature=datasets.Value("int32")),
                "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
                "args": datasets.Sequence(feature=datasets.Value("string")),
            }
        )
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )
|
    def _split_generators(self, dl_manager):
        # Download and extract the zip for the selected config; each archive
        # contains train.jsonl, valid.jsonl, and test.jsonl.
        url = _URLS[self.config.name]
        data_dir = dl_manager.download_and_extract(url)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "filepath": os.path.join(data_dir, "train.jsonl"),
                    "split": "train",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={
                    "filepath": os.path.join(data_dir, "valid.jsonl"),
                    "split": "valid",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "filepath": os.path.join(data_dir, "test.jsonl"),
                    "split": "test",
                },
            ),
        ]
|
    def _generate_examples(self, filepath, split):
        # Every config yields the same fields, so the "all" config needs no
        # special-casing; each jsonl line holds one query sample.
        if not os.path.exists(filepath):
            return
        with open(filepath, encoding="utf-8") as f:
            for key, row in enumerate(f):
                data = json.loads(row)
                query_name = data["query_name"]
                yield key, {
                    "query_name": query_name,
                    "query": data["query"],
                    "answer": data["answer"],
                    # "easy_answer" is stored only for the valid/test splits;
                    # a few names (e.g. the Pt_lPe alias) are missing from
                    # query_name_to_args, hence the defensive .get() lookups.
                    "easy_answer": data.get("easy_answer"),
                    "args": query_name_to_args.get(query_name),
                    "definition": query_structures.get(query_name),
                }
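
# Minimal smoke test (a sketch: assumes network access to the Hub; the
# config_name/download_and_prepare/as_dataset calls are the standard
# datasets builder interface).
if __name__ == "__main__":
    builder = ICEWS14Dataset(config_name="Pe")
    builder.download_and_prepare()
    train = builder.as_dataset(split="train")
    print(train[0])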