linxy committed
Commit: 5ea5860
1 Parent(s): 6063c77

Update ICEWS14.py

Files changed (1)
  1. ICEWS14.py +363 -362
ICEWS14.py CHANGED
@@ -1,363 +1,364 @@
 # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
 #     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # TODO: Address all TODOs and remove all explanatory comments
 """
 TL;DR: Datasets for the temporal knowledge graph reasoning task.
 
 [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
 [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
 [[arXiv]](https://arxiv.org/abs/2205.14307)
 
 - Built over ICEWS and GDELT, which are widely used benchmarks in TKGC.
 - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
 - Please refer to the original paper for more details.
 """
 from dataclasses import dataclass
 from typing import List, Dict, Set, Optional, TypedDict
 import json
 import os
 
 import datasets
 
 
 _CITATION = """\
 @inproceedings{
 xueyuan2023tflex,
 title={TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph},
 author={Lin Xueyuan and Haihong E and Chengjin Xu and Gengxian Zhou and Haoran Luo and Tianyi Hu and Fenglong Su and Ningyuan Li and Mingzhi Sun},
 booktitle={Thirty-seventh Conference on Neural Information Processing Systems},
 year={2023},
 url={https://openreview.net/forum?id=oaGdsgB18L}
 }\
 """
 
 # TODO: Add description of the dataset here
 _DESCRIPTION = """\
 TL;DR: Datasets for the temporal knowledge graph reasoning task.
 
 [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
 [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
 [[arXiv]](https://arxiv.org/abs/2205.14307)
 
 - Built over ICEWS and GDELT, which are widely used benchmarks in TKGC.
 - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
 - Please refer to the original paper for more details.
 """
 
 _HOMEPAGE = "https://github.com/LinXueyuanStdio/TFLEX"
 
 _LICENSE = "[Apache License 2.0](https://github.com/LinXueyuanStdio/TFLEX/blob/main/LICENSE)"
 
 query_name_to_args: Dict[str, List[str]] = {
     # 1. 1-hop Pe and Pt, manually
     "Pe": ['e1', 'r1', 't1'],
     "Pt": ['e1', 'r1', 'e2'],
     # 2. entity multi-hop
     "Pe2": ['e1', 'r1', 't1', 'r2', 't2'],
     "Pe3": ['e1', 'r1', 't1', 'r2', 't2', 'r3', 't3'],
     # 3. time multi-hop
     "aPt": ['s', 'r', 'o'],
     "bPt": ['s', 'r', 'o'],
     "Pt_sPe": ['e1', 'r1', 't1', 'r2', 'e2'],
     "Pt_oPe": ['e1', 'r1', 'e2', 'r2', 't1'],
     "Pe_Pt": ['e1', 'r1', 'e2', 'r2', 'e3'],
     "Pe_aPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
     "Pe_bPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
     "Pe_nPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
     "Pt_sPe_Pt": ['s1', 'r1', 's2', 'r2', 'o1', 'r3', 'o2'],
     "Pt_oPe_Pt": ['s1', 'r1', 's2', 'r2', 's3', 'r3', 'o1'],
     # 4. entity and & time and
     "e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
     "e3i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
     "t2i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
     "t3i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
     # 5. complex time and
     "e2i_Pe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
     "Pe_e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
     "Pt_se2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 'e3'],
     "Pt_oe2i": ['e1', 'r1', 'e2', 'r2', 't1', 'e3', 'r3', 't2'],
     "t2i_Pe": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
     "Pe_t2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
     "Pe_at2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
     "Pe_bt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
     "Pe_nt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
     "between": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
     # 6. entity not
     "e2i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
     "e3i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
     "Pe_e2i_Pe_NPe": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
     "e2i_NPe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
     "e2i_PeN": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
     # 7. time not
     "t2i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
     "t3i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
     "Pe_t2i_PtPe_NPt": ['e1', 'r1', 'e2', 'r2', 't2', 'r3', 'e3', 'e4', 'r4', 'e5'],
     "t2i_NPt": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
     "t2i_PtN": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
     # 8. entity union & time union
     "e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
     "Pe_e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
     "t2u": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
     "Pe_t2u": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
 }
 query_structures: Dict[str, str] = {
     # 1. 1-hop Pe and Pt, manually
     "Pe": "def Pe(e1, r1, t1): return Pe(e1, r1, t1)",  # 1p
     "Pt": "def Pt(e1, r1, e2): return Pt(e1, r1, e2)",  # 1p, temporal
     # 2. entity multi-hop
     "Pe2": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)",  # 2p
     "Pe3": "def Pe3(e1, r1, t1, r2, t2, r3, t3): return Pe(Pe(Pe(e1, r1, t1), r2, t2), r3, t3)",  # 3p
     # 3. time multi-hop
     "aPt": "def aPt(s, r, o): return after(Pt(s, r, o))",  # a for after
     "bPt": "def bPt(s, r, o): return before(Pt(s, r, o))",  # b for before
     "Pt_lPe": "def Pt_lPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",  # l for left (as head entity)
     "Pt_rPe": "def Pt_rPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",  # r for right (as tail entity)
     "Pt_sPe": "def Pt_sPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",  # s for subject (as head entity)
     "Pt_oPe": "def Pt_oPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",  # o for object (as tail entity)
     "Pe_Pt": "def Pe_Pt(e1, r1, e2, r2, e3): return Pe(e1, r1, Pt(e2, r2, e3))",  # at
     "Pe_aPt": "def Pe_aPt(e1, r1, e2, r2, e3): return Pe(e1, r1, after(Pt(e2, r2, e3)))",  # a for after
     "Pe_bPt": "def Pe_bPt(e1, r1, e2, r2, e3): return Pe(e1, r1, before(Pt(e2, r2, e3)))",  # b for before
     "Pe_nPt": "def Pe_nPt(e1, r1, e2, r2, e3): return Pe(e1, r1, next(Pt(e2, r2, e3)))",  # n for next
     "Pt_sPe_Pt": "def Pt_sPe_Pt(s1, r1, s2, r2, o1, r3, o2): return Pt(Pe(s1, r1, Pt(s2, r2, o1)), r3, o2)",
     "Pt_oPe_Pt": "def Pt_oPe_Pt(s1, r1, s2, r2, s3, r3, o1): return Pt(s1, r1, Pe(s2, r2, Pt(s3, r3, o1)))",
     # 4. entity and & time and
     "e2i": "def e2i(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Pe(e2, r2, t2))",  # 2i
     "e3i": "def e3i(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Pe(e3, r3, t3))",  # 3i
     "t2i": "def t2i(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), Pt(e3, r2, e4))",  # t-2i
     "t3i": "def t3i(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), Pt(e5, r3, e6))",  # t-3i
     # 5. complex time and
     "e2i_Pe": "def e2i_Pe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Pe(e2, r3, t3))",  # pi
     "Pe_e2i": "def Pe_e2i(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(e2i(e1, r1, t1, e2, r2, t2), r3, t3)",  # ip
     "Pt_le2i": "def Pt_le2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",  # mix ip
     "Pt_re2i": "def Pt_re2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",  # mix ip
     "Pt_se2i": "def Pt_se2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",  # mix ip
     "Pt_oe2i": "def Pt_oe2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",  # mix ip
     "t2i_Pe": "def t2i_Pe(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), Pt(e3, r3, e4))",  # t-pi
     "Pe_t2i": "def Pe_t2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, t2i(e2, r2, e3, e4, r3, e5))",  # t-ip
     "Pe_at2i": "def Pe_at2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, after(t2i(e2, r2, e3, e4, r3, e5)))",
     "Pe_bt2i": "def Pe_bt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, before(t2i(e2, r2, e3, e4, r3, e5)))",
     "Pe_nt2i": "def Pe_nt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, next(t2i(e2, r2, e3, e4, r3, e5)))",
     "between": "def between(e1, r1, e2, e3, r2, e4): return TimeAnd(after(Pt(e1, r1, e2)), before(Pt(e3, r2, e4)))",  # between(t1, t2) == after t1 and before t2
     # 6. entity not
     "e2i_N": "def e2i_N(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2)))",  # 2in
     "e3i_N": "def e3i_N(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Not(Pe(e3, r3, t3)))",  # 3in
     "Pe_e2i_Pe_NPe": "def Pe_e2i_Pe_NPe(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2))), r3, t3)",  # inp
     "e2i_PeN": "def e2i_PeN(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Not(Pe(e2, r3, t3)))",  # pin
     "e2i_NPe": "def e2i_NPe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Not(Pe(Pe(e1, r1, t1), r2, t2)), Pe(e2, r3, t3))",  # pni = e2i_N(Pe(e1, r1, t1), r2, t2, e2, r3, t3)
     # 7. time not
     "t2i_N": "def t2i_N(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), TimeNot(Pt(e3, r2, e4)))",  # t-2in
     "t3i_N": "def t3i_N(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), TimeNot(Pt(e5, r3, e6)))",  # t-3in
     "Pe_t2i_PtPe_NPt": "def Pe_t2i_PtPe_NPt(e1, r1, e2, r2, t2, r3, e3, e4, r4, e5): return Pe(e1, r1, TimeAnd(Pt(Pe(e2, r2, t2), r3, e3), TimeNot(Pt(e4, r4, e5))))",  # t-inp
     "t2i_PtN": "def t2i_PtN(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), TimeNot(Pt(e3, r3, e4)))",  # t-pin
     "t2i_NPt": "def t2i_NPt(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(TimeNot(Pt(Pe(e1, r1, t1), r2, e2)), Pt(e3, r3, e4))",  # t-pni
     # 8. entity union & time union
     "e2u": "def e2u(e1, r1, t1, e2, r2, t2): return Or(Pe(e1, r1, t1), Pe(e2, r2, t2))",  # 2u
     "Pe_e2u": "def Pe_e2u(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Or(Pe(e1, r1, t1), Pe(e2, r2, t2)), r3, t3)",  # up
     "t2u": "def t2u(e1, r1, e2, e3, r2, e4): return TimeOr(Pt(e1, r1, e2), Pt(e3, r2, e4))",  # t-2u
     "Pe_t2u": "def Pe_t2u(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeOr(Pt(e2, r2, e3), Pt(e4, r3, e5)))",  # t-up
     # 9. union-DM
     "e2u_DM": "def e2u_DM(e1, r1, t1, e2, r2, t2): return Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2))))",  # 2u-DM
     "Pe_e2u_DM": "def Pe_e2u_DM(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2)))), r3, t3)",  # up-DM
     "t2u_DM": "def t2u_DM(e1, r1, e2, e3, r2, e4): return TimeNot(TimeAnd(TimeNot(Pt(e1, r1, e2)), TimeNot(Pt(e3, r2, e4))))",  # t-2u-DM
     "Pe_t2u_DM": "def Pe_t2u_DM(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeNot(TimeAnd(TimeNot(Pt(e2, r2, e3)), TimeNot(Pt(e4, r3, e5)))))",  # t-up-DM
     # 10. union-DNF
     "e2u_DNF": "def e2u_DNF(e1, r1, t1, e2, r2, t2): return Pe(e1, r1, t1), Pe(e2, r2, t2)",  # 2u_DNF
     "Pe_e2u_DNF": "def Pe_e2u_DNF(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Pe(e1, r1, t1), r3, t3), Pe(Pe(e2, r2, t2), r3, t3)",  # up_DNF
     "t2u_DNF": "def t2u_DNF(e1, r1, e2, e3, r2, e4): return Pt(e1, r1, e2), Pt(e3, r2, e4)",  # t-2u_DNF
     "Pe_t2u_DNF": "def Pe_t2u_DNF(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, Pt(e2, r2, e3)), Pe(e1, r1, Pt(e4, r3, e5))",  # t-up_DNF
 }
 union_query_structures: List[str] = [
     "e2u", "Pe_e2u",  # 2u, up
     "t2u", "Pe_t2u",  # t-2u, t-up
 ]
 train_query_structures: List[str] = [
     # entity
     "Pe", "Pe2", "Pe3", "e2i", "e3i",  # 1p, 2p, 3p, 2i, 3i
     "e2i_NPe", "e2i_PeN", "Pe_e2i_Pe_NPe", "e2i_N", "e3i_N",  # npi, pni, inp, 2in, 3in
     # time
     "Pt", "Pt_lPe", "Pt_rPe", "Pe_Pt", "Pe_aPt", "Pe_bPt", "Pe_nPt",  # t-1p, t-2p
     "t2i", "t3i", "Pt_le2i", "Pt_re2i", "Pe_t2i", "Pe_at2i", "Pe_bt2i", "Pe_nt2i", "between",  # t-2i, t-3i
     "t2i_NPt", "t2i_PtN", "Pe_t2i_PtPe_NPt", "t2i_N", "t3i_N",  # t-npi, t-pni, t-inp, t-2in, t-3in
 ]
 test_query_structures: List[str] = train_query_structures + [
     # entity
     "e2i_Pe", "Pe_e2i",  # pi, ip
     "e2u", "Pe_e2u",  # 2u, up
     # time
     "t2i_Pe", "Pe_t2i",  # t-pi, t-ip
     "t2u", "Pe_t2u",  # t-2u, t-up
     # union-DM
     "e2u_DM", "Pe_e2u_DM",  # 2u-DM, up-DM
     "t2u_DM", "Pe_t2u_DM",  # t-2u-DM, t-up-DM
 ]
 
 
 # TODO: Add link to the official dataset URLs here
 # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
 # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
-_HOST = "https://huggingface.co"
-_AUTHOR = "linxy59"
+_HOST = "https://huggingface.co/datasets"
+_AUTHOR = "linxy"
 _DATASET = "ICEWS14"
 _URLS = {
-    name: f"{_HOST}/{_AUTHOR}/{_DATASET}/zips/{name}.zip"
+    name: f"{_HOST}/{_AUTHOR}/{_DATASET}/resolve/main/zips/{name}.zip?download=true"
     for name in ["all"] + list(query_name_to_args.keys())
 }
+
 
 class QueryData(TypedDict):
     """
     saved in training split: query_name, query, answer
     saved in valid or test split: query_name, query, answer, easy_answer
     iterating training dataloader: query_name, query, answer, args, definition
     iterating valid or test dataloader: query_name, query, answer, easy_answer, args, definition
     """
     query_name: str
     query: List[int]
     answer: Set[int]
     easy_answer: Optional[Set[int]] = None  # may be empty, indicating that no easy answer exists in the training graph.
     args: Optional[List[str]] = None
     definition: Optional[str] = None
 
 @dataclass
 class TKGRBuilderConfig(datasets.BuilderConfig):
     """BuilderConfig for TKGR (Temporal Knowledge Graph Reasoning)."""
     query_structure_name: str = "default"
 
 class ICEWS14Dataset(datasets.GeneratorBasedBuilder):
     """TODO: Short description of my dataset."""
 
     VERSION = datasets.Version("1.0.0")
 
     # This is an example of a dataset with multiple configurations.
     # If you don't want/need to define several sub-sets in your dataset,
     # just remove the BUILDER_CONFIG_CLASS and the BUILDER_CONFIGS attributes.
 
     # If you need to make complex sub-parts in the datasets with configurable options
     # You can create your own builder configuration class to store attributes, inheriting from datasets.BuilderConfig
     # BUILDER_CONFIG_CLASS = MyBuilderConfig
 
     # You will be able to load one or the other configurations in the following list with
     # data = datasets.load_dataset('my_dataset', 'first_domain')
     # data = datasets.load_dataset('my_dataset', 'second_domain')
     STANDARD_BUILDER_CONFIGS = [
         datasets.BuilderConfig(
             name=query_name,
             version=datasets.Version("1.0.0"),
             description=query_structures[query_name],
         )
         for query_name in list(query_name_to_args.keys())
     ]
     BUILDER_CONFIGS = [
         datasets.BuilderConfig(
             name="all",
             version=VERSION,
             description=f"All types of queries. Train: {train_query_structures}, Valid | Test: {test_query_structures}",
         )
     ] + STANDARD_BUILDER_CONFIGS
 
     DEFAULT_CONFIG_NAME = "all"  # It's not mandatory to have a default configuration. Just use one if it makes sense.
 
     def _info(self):
         if self.config.name == "all":  # This is the name of the configuration selected in BUILDER_CONFIGS above
             features = datasets.Features(
                 {
                     "query_name": datasets.Value("string"),
                     "definition": datasets.Value("string"),
                     "query": datasets.Sequence(feature=datasets.Value("int32")),
                     "answer": datasets.Sequence(feature=datasets.Value("int32")),
                     "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
                     "args": datasets.Sequence(feature=datasets.Value("string")),
                 }
             )
         else:
             features = datasets.Features(
                 {
                     "query_name": datasets.Value("string"),
                     "definition": datasets.Value("string"),
                     "query": datasets.Sequence(feature=datasets.Value("int32")),
                     "answer": datasets.Sequence(feature=datasets.Value("int32")),
                     "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
                     "args": datasets.Sequence(feature=datasets.Value("string")),
                 }
             )
         return datasets.DatasetInfo(
             description=_DESCRIPTION,
             features=features,
             homepage=_HOMEPAGE,
             license=_LICENSE,
             citation=_CITATION,
         )
 
     def _split_generators(self, dl_manager):
         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLS
         # It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
         urls = _URLS[self.config.name]
         data_dir = dl_manager.download_and_extract(urls)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(data_dir, "train.jsonl"),
                     "split": "train",
                 },
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.VALIDATION,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(data_dir, "valid.jsonl"),
                     "split": "valid",
                 },
             ),
             datasets.SplitGenerator(
                 name=datasets.Split.TEST,
                 # These kwargs will be passed to _generate_examples
                 gen_kwargs={
                     "filepath": os.path.join(data_dir, "test.jsonl"),
                     "split": "test",
                 },
             ),
         ]
 
     def _generate_examples(self, filepath, split):
         # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
         # This method yields (key, example) tuples from the dataset.
         # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
         if not os.path.exists(filepath):
             return
         with open(filepath, encoding="utf-8") as f:
             for key, row in enumerate(f):
                 data = json.loads(row)
                 query_name = data["query_name"]
                 if self.config.name == "all":
                     yield key, {
                         "query_name": query_name,
                         "query": data["query"],
                         "answer": data["answer"],
                         "easy_answer": data["easy_answer"] if "easy_answer" in data else None,
                         "args": query_name_to_args[query_name],
                         "definition": query_structures[query_name],
                     }
                 else:
                     yield key, {
                         "query_name": query_name,
                         "query": data["query"],
                         "answer": data["answer"],
                         "easy_answer": data["easy_answer"] if "easy_answer" in data else None,
                         "args": query_name_to_args[query_name],
                         "definition": query_structures[query_name],
                     }
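
The substance of this commit is the `_URLS` rewrite: the zip archives are now fetched from the dataset repo's `resolve/main` endpoint under the `linxy` namespace (which serves raw files from the repo's `main` branch) instead of the old `linxy59` path. A quick sketch of what the comprehension now produces for one config name; both URL patterns are taken verbatim from the diff above, with "Pe" standing in for any key of `query_name_to_args` (plus "all"):

```python
name = "Pe"
old_url = f"https://huggingface.co/linxy59/ICEWS14/zips/{name}.zip"
new_url = f"https://huggingface.co/datasets/linxy/ICEWS14/resolve/main/zips/{name}.zip?download=true"
print(new_url)
# https://huggingface.co/datasets/linxy/ICEWS14/resolve/main/zips/Pe.zip?download=true
```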
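For context, a minimal sketch of how this script is consumed, assuming it is published as the Hub dataset repo `linxy/ICEWS14` (the `_AUTHOR`/`_DATASET` values above); newer `datasets` versions also require `trust_remote_code=True` for script-based datasets:

```python
import datasets

# Load one per-structure config ("Pe") or the default "all" config.
data = datasets.load_dataset("linxy/ICEWS14", "Pe", trust_remote_code=True)
print(data)                     # DatasetDict with train/validation/test splits
print(data["train"][0].keys())  # query_name, definition, query, answer, easy_answer, args
```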
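Each row stores its query as a flat id list, and `_generate_examples` attaches the matching `args` and `definition` so consumers can re-associate names with positions. A hedged sketch of that pairing (the ids below are invented, and the zip assumes, as the script's `args` field implies, that `query` follows the argument order of `query_name_to_args[query_name]`):

```python
# Hypothetical row as yielded for the "Pe2" structure; ids are made up.
row = {
    "query_name": "Pe2",
    "query": [4107, 22, 59, 13, 180],        # flat entity/relation/timestamp ids
    "args": ["e1", "r1", "t1", "r2", "t2"],  # from query_name_to_args["Pe2"]
    "definition": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)",
}
binding = dict(zip(row["args"], row["query"]))
print(binding)  # {'e1': 4107, 'r1': 22, 't1': 59, 'r2': 13, 't2': 180}
```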