linxy committed on
Commit
6063c77
1 Parent(s): 98ca975

Upload 44 files

ICEWS14.py ADDED
@@ -0,0 +1,363 @@
+ # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """
+ TL;DR: Datasets for the temporal knowledge graph reasoning task.
+ 
+ [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
+ [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
+ [[arXiv]](https://arxiv.org/abs/2205.14307)
+ 
+ - Built over ICEWS and GDELT, two widely used benchmarks in temporal knowledge graph completion (TKGC).
+ - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
+ - Please refer to the original paper for more details.
+ """
+ import json
+ import os
+ from dataclasses import dataclass
+ from typing import Dict, List, Optional, Set, TypedDict
+ 
+ import datasets
+ 
+ 
+ _CITATION = """\
+ @inproceedings{xueyuan2023tflex,
+     title={TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph},
+     author={Lin Xueyuan and Haihong E and Chengjin Xu and Gengxian Zhou and Haoran Luo and Tianyi Hu and Fenglong Su and Ningyuan Li and Mingzhi Sun},
+     booktitle={Thirty-seventh Conference on Neural Information Processing Systems},
+     year={2023},
+     url={https://openreview.net/forum?id=oaGdsgB18L}
+ }\
+ """
+ 
+ _DESCRIPTION = """\
+ TL;DR: Datasets for the temporal knowledge graph reasoning task.
+ 
+ [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
+ [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
+ [[arXiv]](https://arxiv.org/abs/2205.14307)
+ 
+ - Built over ICEWS and GDELT, two widely used benchmarks in temporal knowledge graph completion (TKGC).
+ - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
+ - Please refer to the original paper for more details.
+ """
+ 
+ _HOMEPAGE = "https://github.com/LinXueyuanStdio/TFLEX"
+ 
+ _LICENSE = "[Apache License 2.0](https://github.com/LinXueyuanStdio/TFLEX/blob/main/LICENSE)"
+ 
+ query_name_to_args: Dict[str, List[str]] = {
+     # 1. 1-hop Pe and Pt, manually
+     "Pe": ['e1', 'r1', 't1'],
+     "Pt": ['e1', 'r1', 'e2'],
+     # 2. entity multi-hop
+     "Pe2": ['e1', 'r1', 't1', 'r2', 't2'],
+     "Pe3": ['e1', 'r1', 't1', 'r2', 't2', 'r3', 't3'],
+     # 3. time multi-hop
+     "aPt": ['s', 'r', 'o'],
+     "bPt": ['s', 'r', 'o'],
+     "Pt_sPe": ['e1', 'r1', 't1', 'r2', 'e2'],
+     "Pt_oPe": ['e1', 'r1', 'e2', 'r2', 't1'],
+     "Pe_Pt": ['e1', 'r1', 'e2', 'r2', 'e3'],
+     "Pe_aPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
+     "Pe_bPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
+     "Pe_nPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
+     "Pt_sPe_Pt": ['s1', 'r1', 's2', 'r2', 'o1', 'r3', 'o2'],
+     "Pt_oPe_Pt": ['s1', 'r1', 's2', 'r2', 's3', 'r3', 'o1'],
+     # 4. entity and & time and
+     "e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
+     "e3i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
+     "t2i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
+     "t3i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
+     # 5. complex time and
+     "e2i_Pe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
+     "Pe_e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
+     "Pt_se2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 'e3'],
+     "Pt_oe2i": ['e1', 'r1', 'e2', 'r2', 't1', 'e3', 'r3', 't2'],
+     "t2i_Pe": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
+     "Pe_t2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
+     "Pe_at2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
+     "Pe_bt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
+     "Pe_nt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
+     "between": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
+     # 6. entity not
+     "e2i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
+     "e3i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
+     "Pe_e2i_Pe_NPe": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
+     "e2i_NPe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
+     "e2i_PeN": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
+     # 7. time not
+     "t2i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
+     "t3i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
+     "Pe_t2i_PtPe_NPt": ['e1', 'r1', 'e2', 'r2', 't2', 'r3', 'e3', 'e4', 'r4', 'e5'],
+     "t2i_NPt": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
+     "t2i_PtN": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
+     # 8. entity union & time union
+     "e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
+     "Pe_e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
+     "t2u": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
+     "Pe_t2u": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
+ }
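+ # Illustrative note (not used by the loader): each name in an `args` list
+ # labels the value at the same position of a flattened integer `query`, so a
+ # query can be rebound to named arguments like this (the ids are hypothetical):
+ #
+ #     dict(zip(query_name_to_args["Pe"], [4233, 58, 117]))
+ #     # -> {'e1': 4233, 'r1': 58, 't1': 117}
+ 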
+ query_structures: Dict[str, str] = {
+     # 1. 1-hop Pe and Pt, manually
+     "Pe": "def Pe(e1, r1, t1): return Pe(e1, r1, t1)",  # 1p
+     "Pt": "def Pt(e1, r1, e2): return Pt(e1, r1, e2)",  # 1p, temporal
+     # 2. entity multi-hop
+     "Pe2": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)",  # 2p
+     "Pe3": "def Pe3(e1, r1, t1, r2, t2, r3, t3): return Pe(Pe(Pe(e1, r1, t1), r2, t2), r3, t3)",  # 3p
+     # 3. time multi-hop
+     "aPt": "def aPt(s, r, o): return after(Pt(s, r, o))",  # a for after
+     "bPt": "def bPt(s, r, o): return before(Pt(s, r, o))",  # b for before
+     "Pt_lPe": "def Pt_lPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",  # l for left (as head entity)
+     "Pt_rPe": "def Pt_rPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",  # r for right (as tail entity)
+     "Pt_sPe": "def Pt_sPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)",  # s for subject (as head entity)
+     "Pt_oPe": "def Pt_oPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))",  # o for object (as tail entity)
+     "Pe_Pt": "def Pe_Pt(e1, r1, e2, r2, e3): return Pe(e1, r1, Pt(e2, r2, e3))",  # at
+     "Pe_aPt": "def Pe_aPt(e1, r1, e2, r2, e3): return Pe(e1, r1, after(Pt(e2, r2, e3)))",  # a for after
+     "Pe_bPt": "def Pe_bPt(e1, r1, e2, r2, e3): return Pe(e1, r1, before(Pt(e2, r2, e3)))",  # b for before
+     "Pe_nPt": "def Pe_nPt(e1, r1, e2, r2, e3): return Pe(e1, r1, next(Pt(e2, r2, e3)))",  # n for next
+     "Pt_sPe_Pt": "def Pt_sPe_Pt(s1, r1, s2, r2, o1, r3, o2): return Pt(Pe(s1, r1, Pt(s2, r2, o1)), r3, o2)",
+     "Pt_oPe_Pt": "def Pt_oPe_Pt(s1, r1, s2, r2, s3, r3, o1): return Pt(s1, r1, Pe(s2, r2, Pt(s3, r3, o1)))",
+     # 4. entity and & time and
+     "e2i": "def e2i(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Pe(e2, r2, t2))",  # 2i
+     "e3i": "def e3i(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Pe(e3, r3, t3))",  # 3i
+     "t2i": "def t2i(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), Pt(e3, r2, e4))",  # t-2i
+     "t3i": "def t3i(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), Pt(e5, r3, e6))",  # t-3i
+     # 5. complex time and
+     "e2i_Pe": "def e2i_Pe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Pe(e2, r3, t3))",  # pi
+     "Pe_e2i": "def Pe_e2i(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(e2i(e1, r1, t1, e2, r2, t2), r3, t3)",  # ip
+     "Pt_le2i": "def Pt_le2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",  # mix ip
+     "Pt_re2i": "def Pt_re2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",  # mix ip
+     "Pt_se2i": "def Pt_se2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)",  # mix ip
+     "Pt_oe2i": "def Pt_oe2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))",  # mix ip
+     "t2i_Pe": "def t2i_Pe(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), Pt(e3, r3, e4))",  # t-pi
+     "Pe_t2i": "def Pe_t2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, t2i(e2, r2, e3, e4, r3, e5))",  # t-ip
+     "Pe_at2i": "def Pe_at2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, after(t2i(e2, r2, e3, e4, r3, e5)))",
+     "Pe_bt2i": "def Pe_bt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, before(t2i(e2, r2, e3, e4, r3, e5)))",
+     "Pe_nt2i": "def Pe_nt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, next(t2i(e2, r2, e3, e4, r3, e5)))",
+     "between": "def between(e1, r1, e2, e3, r2, e4): return TimeAnd(after(Pt(e1, r1, e2)), before(Pt(e3, r2, e4)))",  # between(t1, t2) == after t1 and before t2
+     # 6. entity not
+     "e2i_N": "def e2i_N(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2)))",  # 2in
+     "e3i_N": "def e3i_N(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Not(Pe(e3, r3, t3)))",  # 3in
+     "Pe_e2i_Pe_NPe": "def Pe_e2i_Pe_NPe(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2))), r3, t3)",  # inp
+     "e2i_PeN": "def e2i_PeN(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Not(Pe(e2, r3, t3)))",  # pin
+     "e2i_NPe": "def e2i_NPe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Not(Pe(Pe(e1, r1, t1), r2, t2)), Pe(e2, r3, t3))",  # pni = e2i_N(Pe(e1, r1, t1), r2, t2, e2, r3, t3)
+     # 7. time not
+     "t2i_N": "def t2i_N(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), TimeNot(Pt(e3, r2, e4)))",  # t-2in
+     "t3i_N": "def t3i_N(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), TimeNot(Pt(e5, r3, e6)))",  # t-3in
+     "Pe_t2i_PtPe_NPt": "def Pe_t2i_PtPe_NPt(e1, r1, e2, r2, t2, r3, e3, e4, r4, e5): return Pe(e1, r1, TimeAnd(Pt(Pe(e2, r2, t2), r3, e3), TimeNot(Pt(e4, r4, e5))))",  # t-inp
+     "t2i_PtN": "def t2i_PtN(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), TimeNot(Pt(e3, r3, e4)))",  # t-pin
+     "t2i_NPt": "def t2i_NPt(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(TimeNot(Pt(Pe(e1, r1, t1), r2, e2)), Pt(e3, r3, e4))",  # t-pni
+     # 8. entity union & time union
+     "e2u": "def e2u(e1, r1, t1, e2, r2, t2): return Or(Pe(e1, r1, t1), Pe(e2, r2, t2))",  # 2u
+     "Pe_e2u": "def Pe_e2u(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Or(Pe(e1, r1, t1), Pe(e2, r2, t2)), r3, t3)",  # up
+     "t2u": "def t2u(e1, r1, e2, e3, r2, e4): return TimeOr(Pt(e1, r1, e2), Pt(e3, r2, e4))",  # t-2u
+     "Pe_t2u": "def Pe_t2u(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeOr(Pt(e2, r2, e3), Pt(e4, r3, e5)))",  # t-up
+     # 9. union-DM
+     "e2u_DM": "def e2u_DM(e1, r1, t1, e2, r2, t2): return Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2))))",  # 2u-DM
+     "Pe_e2u_DM": "def Pe_e2u_DM(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2)))), r3, t3)",  # up-DM
+     "t2u_DM": "def t2u_DM(e1, r1, e2, e3, r2, e4): return TimeNot(TimeAnd(TimeNot(Pt(e1, r1, e2)), TimeNot(Pt(e3, r2, e4))))",  # t-2u-DM
+     "Pe_t2u_DM": "def Pe_t2u_DM(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeNot(TimeAnd(TimeNot(Pt(e2, r2, e3)), TimeNot(Pt(e4, r3, e5)))))",  # t-up-DM
+     # 10. union-DNF
+     "e2u_DNF": "def e2u_DNF(e1, r1, t1, e2, r2, t2): return Pe(e1, r1, t1), Pe(e2, r2, t2)",  # 2u_DNF
+     "Pe_e2u_DNF": "def Pe_e2u_DNF(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Pe(e1, r1, t1), r3, t3), Pe(Pe(e2, r2, t2), r3, t3)",  # up_DNF
+     "t2u_DNF": "def t2u_DNF(e1, r1, e2, e3, r2, e4): return Pt(e1, r1, e2), Pt(e3, r2, e4)",  # t-2u_DNF
+     "Pe_t2u_DNF": "def Pe_t2u_DNF(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, Pt(e2, r2, e3)), Pe(e1, r1, Pt(e4, r3, e5))",  # t-up_DNF
+ }
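+ # Illustrative sketch (not part of the loader): each `query_structures` value
+ # is a plain Python function definition over an abstract query algebra, so it
+ # can be compiled against any implementation of the operators (Pe, Pt, And,
+ # TimeAnd, after, before, next, ...). `my_Pe` below is a hypothetical
+ # user-supplied 1-hop operator; note the 1-hop definitions themselves are
+ # self-referential identities and are not meant to be executed this way.
+ #
+ #     def materialize(name: str, operators: Dict[str, object]):
+ #         namespace = dict(operators)
+ #         exec(query_structures[name], namespace)
+ #         return namespace[name]
+ #
+ #     pe2 = materialize("Pe2", {"Pe": my_Pe})
+ #     answers = pe2(e1, r1, t1, r2, t2)  # == my_Pe(my_Pe(e1, r1, t1), r2, t2)
+ 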
+ union_query_structures: List[str] = [
+     "e2u", "Pe_e2u",  # 2u, up
+     "t2u", "Pe_t2u",  # t-2u, t-up
+ ]
+ train_query_structures: List[str] = [
+     # entity
+     "Pe", "Pe2", "Pe3", "e2i", "e3i",  # 1p, 2p, 3p, 2i, 3i
+     "e2i_NPe", "e2i_PeN", "Pe_e2i_Pe_NPe", "e2i_N", "e3i_N",  # npi, pni, inp, 2in, 3in
+     # time
+     "Pt", "Pt_lPe", "Pt_rPe", "Pe_Pt", "Pe_aPt", "Pe_bPt", "Pe_nPt",  # t-1p, t-2p
+     "t2i", "t3i", "Pt_le2i", "Pt_re2i", "Pe_t2i", "Pe_at2i", "Pe_bt2i", "Pe_nt2i", "between",  # t-2i, t-3i
+     "t2i_NPt", "t2i_PtN", "Pe_t2i_PtPe_NPt", "t2i_N", "t3i_N",  # t-npi, t-pni, t-inp, t-2in, t-3in
+ ]
+ test_query_structures: List[str] = train_query_structures + [
+     # entity
+     "e2i_Pe", "Pe_e2i",  # pi, ip
+     "e2u", "Pe_e2u",  # 2u, up
+     # time
+     "t2i_Pe", "Pe_t2i",  # t-pi, t-ip
+     "t2u", "Pe_t2u",  # t-2u, t-up
+     # union-DM
+     "e2u_DM", "Pe_e2u_DM",  # 2u-DM, up-DM
+     "t2u_DM", "Pe_t2u_DM",  # t-2u-DM, t-up-DM
+ ]
+ 
+ 
+ # One zip archive per configuration; the archives are hosted in this dataset
+ # repository (see the zips/ directory in this commit) and are downloaded and
+ # extracted in `_split_generators`.
+ _HOST = "https://huggingface.co"
+ _AUTHOR = "linxy59"
+ _DATASET = "ICEWS14"
+ _URLS = {
+     name: f"{_HOST}/{_AUTHOR}/{_DATASET}/zips/{name}.zip"
+     for name in ["all"] + list(query_name_to_args.keys())
+ }
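+ # For example, _URLS["Pe"] evaluates to
+ # "https://huggingface.co/linxy59/ICEWS14/zips/Pe.zip".
+ 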
+ class QueryData(TypedDict):
+     """
+     Saved in the training split: query_name, query, answer.
+     Saved in the valid and test splits: query_name, query, answer, easy_answer.
+     Yielded when iterating the training split: query_name, query, answer, args, definition.
+     Yielded when iterating the valid and test splits: query_name, query, answer, easy_answer, args, definition.
+     """
+     query_name: str
+     query: List[int]
+     answer: Set[int]
+     # TypedDict fields cannot declare default values; the keys below are always
+     # yielded but may be None. easy_answer may also be empty, indicating that
+     # no easy answer exists in the training graph.
+     easy_answer: Optional[Set[int]]
+     args: Optional[List[str]]
+     definition: Optional[str]
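+ 
+ # A hypothetical example row for the "Pe" configuration, to make the layout
+ # concrete (all ids are made up):
+ #     {"query_name": "Pe", "query": [4233, 58, 117], "answer": [3290, 451],
+ #      "easy_answer": None, "args": ["e1", "r1", "t1"],
+ #      "definition": query_structures["Pe"]}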
+ 
+ 
+ @dataclass
+ class TKGRBuilderConfig(datasets.BuilderConfig):
+     """BuilderConfig for TKGR (Temporal Knowledge Graph Reasoning)."""
+ 
+     query_structure_name: str = "default"
+ 
+ 
+ class ICEWS14Dataset(datasets.GeneratorBasedBuilder):
+     """ICEWS14 datasets of complex logical queries for temporal knowledge graph reasoning (TFLEX)."""
+ 
+     VERSION = datasets.Version("1.0.0")
+ 
+     # One configuration per query structure, plus an "all" configuration that
+     # mixes every query type. A specific configuration can be loaded with, e.g.:
+     #     data = datasets.load_dataset("linxy59/ICEWS14", "Pe")
+     #     data = datasets.load_dataset("linxy59/ICEWS14", "all")
+     STANDARD_BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name=query_name,
+             version=datasets.Version("1.0.0"),
+             description=query_structures[query_name],
+         )
+         for query_name in list(query_name_to_args.keys())
+     ]
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name="all",
+             version=VERSION,
+             description=f"All types of queries. Train: {train_query_structures}, Valid | Test: {test_query_structures}",
+         )
+     ] + STANDARD_BUILDER_CONFIGS
+ 
+     DEFAULT_CONFIG_NAME = "all"  # A default configuration is optional; "all" makes sense here.
+ 
+     def _info(self):
+         # The feature schema is identical for every configuration, including "all".
+         features = datasets.Features(
+             {
+                 "query_name": datasets.Value("string"),
+                 "definition": datasets.Value("string"),
+                 "query": datasets.Sequence(feature=datasets.Value("int32")),
+                 "answer": datasets.Sequence(feature=datasets.Value("int32")),
+                 "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
+                 "args": datasets.Sequence(feature=datasets.Value("string")),
+             }
+         )
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=features,
+             homepage=_HOMEPAGE,
+             license=_LICENSE,
+             citation=_CITATION,
+         )
+ 
+     def _split_generators(self, dl_manager):
+         # dl_manager downloads and extracts the archive for the selected
+         # configuration and returns the path to the extracted folder.
+         urls = _URLS[self.config.name]
+         data_dir = dl_manager.download_and_extract(urls)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 # These kwargs are passed to _generate_examples.
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "train.jsonl"),
+                     "split": "train",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.VALIDATION,
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "valid.jsonl"),
+                     "split": "valid",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "test.jsonl"),
+                     "split": "test",
+                 },
+             ),
+         ]
+ 
+     def _generate_examples(self, filepath, split):
+         # Yields (key, example) tuples; the key must be unique per example
+         # within a split. If the split file is absent, yield nothing.
+         if not os.path.exists(filepath):
+             return
+         with open(filepath, encoding="utf-8") as f:
+             for key, row in enumerate(f):
+                 data = json.loads(row)
+                 query_name = data["query_name"]
+                 # The schema is the same for every configuration, so a single
+                 # yield covers both "all" and the per-query configurations.
+                 yield key, {
+                     "query_name": query_name,
+                     "query": data["query"],
+                     "answer": data["answer"],
+                     "easy_answer": data.get("easy_answer"),
+                     "args": query_name_to_args[query_name],
+                     "definition": query_structures[query_name],
+                 }
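+ 
+ 
+ if __name__ == "__main__":
+     # Minimal smoke test, a sketch only: it assumes the zip URLs above are
+     # reachable and a `datasets` version that supports script-based loading
+     # (newer releases may additionally require trust_remote_code=True).
+     dataset = datasets.load_dataset(__file__, "Pe")
+     print(dataset["train"][0])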
zips/Pe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f950fb37d4742d60e15d602609284bf85a51953e040752cc7273552eb42cebb7
+ size 506022
zips/Pe2.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b39da5825dc3281ed9c0fef10f1828b1d1a20b083db18a8f9b2fd3ddb9ac9640
+ size 1003376
zips/Pe3.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f53c1ccb464b9523a3ed7cb3f8e14662fe996f332652dd87b078655dd377a3f
+ size 1245157
zips/Pe_Pt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:36682d5d8790b316467c9bcf0e1345c68942818db4f16ddb1e6d9ae1aecf38ba
+ size 328054
zips/Pe_aPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2372e185ce159ad807c17d48130c58bcb3f6aa1b848692ab4ac3104386a23c2
+ size 537715
zips/Pe_at2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e944f9e8e6d6f3455ea9d923c66c46c65a40db9daa40daa6d548fa61172fbf74
+ size 787606
zips/Pe_bPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:124b16c05b230a47bddc81c8bb26716bcd1f6ca712b73ce941e44abf3fbc7d3a
+ size 526930
zips/Pe_bt2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f91d50ce7ef7a41e10587b2becad4165f613ea8cc09e63b762693a8770ade2b
+ size 781404
zips/Pe_e2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccc9cf1fb2766bd3f55a5c71432cb9781b10925bb9772dc9472cf5c2a1f1cb0b
+ size 77945
zips/Pe_e2i_Pe_NPe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3847f589b811f1fc9fd59747a56b8e073ce2b567cfbb8311524079f4011e3fd6
+ size 251520
zips/Pe_e2u.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:103b9d8ad995136ffa4a0a2c819b678e7f8a2a21d5c23bc94c429b914147b351
+ size 128780
zips/Pe_nPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ba102fc01e24655312f8544ff7a149fd89cf0e4c70c574bbcedaf513bb44803
+ size 206891
zips/Pe_nt2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ffa1b9a71dd2a2142c15f214ee4e2dd2aca38e5c02d4852620ef0b3b86e9be4
+ size 782696
zips/Pe_t2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ff5c57660711cce8aa85677e1549cc5a5eb9134be48aecd8b391d3ff971c8d2
+ size 129530
zips/Pe_t2i_PtPe_NPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0bdd0d0ce66d520062d70d8086971497a26df67e28ebba4cba1b81334cac2dc
+ size 337940
zips/Pe_t2u.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a39e6091baf79da0c1a2cefc6dd9b93e6e89fd481f280086116d2a193b64ef76
+ size 147156
zips/Pt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f42b5df43ac5f8e47f4654b18a1b31351407821e393bb2fa887c1718aedeef0
+ size 461072
zips/Pt_oPe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1bc460d0cd3eba8cf2f1fa57422c408b9ea7ee303c774d4aaf3d1fa28d14a75
+ size 248213
zips/Pt_oPe_Pt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e3287f99639c0806af9cbfc18014b2c0ddf0b40bd3a50f49a9c980994c8b422e
+ size 580996
zips/Pt_oe2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80315df01661060f144b0b8d7953b674d6d2a40a89086ac7ea4819235781d6fe
+ size 287755
zips/Pt_sPe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72a2a8d4d25b8030627af472c447567da95292359f3b3e215b43f901c89e6de8
+ size 611280
zips/Pt_sPe_Pt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c6983ff66618ae7282089f419bf95f1f8a82d319e1f1016bcd399e2334394f4
+ size 581678
zips/Pt_se2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9992c5eadcf0d1c62e206bd5b1f2b7c81c66cc8fdaeb314f6b87006b5681d90
+ size 288130
zips/aPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a4cc31346d80a59bb68d8d054d14c3e3e31db9b0d6ab60c016f07b4dc551630
+ size 414001
zips/all.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0bf107f5413221fef3703f5025814015d8e329d2fe31ca8446f06561a0b17c77
+ size 20350171
zips/bPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20a67b547943273f70d294ed992da5357d35fdfc6aeee674ecff1591823574d9
+ size 396417
zips/between.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3783b3ad0fd54e66ef432c03d6f7209103789eeb7f56c6c9f757c3e836a34fd3
+ size 350731
zips/e2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9ee1e2b4334254eb99fa1d623da924b9a33c07d7e5b03106f2fe52d1e25cd14
+ size 1086174
zips/e2i_N.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4846198bddd398b77e790d25c089537ed2712f48f7ddc2afc254f451cb1aaa9c
+ size 191702
zips/e2i_NPe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec2a21892ac89316156b6e9f9a7f0b9870dee0ea70ffc66df1a31bcc036b1e9e
+ size 253052
zips/e2i_Pe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d4a488dd6cca1d37f28d57cf7538f18b9f16a34ec0b1627c53b268ef607fd26
+ size 102492
zips/e2i_PeN.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f52c000aa628a8e4d926217f6397077d80702092b20c41507f58c9d7affd964
+ size 254376
zips/e2u.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3044815924f9347f24c2017efa99e887ba5cc6ebeabb1d109d3c531849a2aadf
+ size 118244
zips/e3i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f48638407d593782c245757c05e799eb316daeceab02db2d83303a2555e97c2
+ size 1472352
zips/e3i_N.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a38441674d503617e44843fc41db0f92f3db166aea2ecf0f557c0d65a19978a9
+ size 246971
zips/t2i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc5cc9351ed5d3a1451f6bd1757a8e149d3840037be0bc3c2c5a207dbe55a94c
+ size 1334435
zips/t2i_N.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0afa5b6dbed1d7f77f79eb5279fea5eccf91645c6fda08587a49d88e0b5903f2
+ size 243787
zips/t2i_NPt.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19d68f7e36d67ba201b10b85db848676c457708240fd20d18c8c7fc9fe18dd1f
+ size 691047
zips/t2i_Pe.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:470f74afcc172c7a2dfb1a3d8d4d247e2aa22e18b504c151058dd6be1e399684
+ size 118901
zips/t2i_PtN.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49f5ed7eefac82492bba892ad7917ce2612190d723d546ae2751031cce5a332b
+ size 337712
zips/t2u.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:920f5def0dfe3814778f24c56f8f9db4da5334b34feb49362f4263fde0809ae0
+ size 140567
zips/t3i.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b393a4fb4f3d648f79c7ea475863f3c2af6ae5c453b9c17de4e31be876bdfe7d
+ size 1540473
zips/t3i_N.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57eac054b06d37589b9a16360d229dfd606744496ae42cb65499f036368f10a9
+ size 284006