linxy committed
Commit fbe90c5
1 Parent(s): 150999f

Upload ICEWS14.py

Files changed (1)
ICEWS14.py +390 -364
ICEWS14.py CHANGED
@@ -1,364 +1,390 @@
- # Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # TODO: Address all TODOs and remove all explanatory comments
- """
- TL;DR: The datasets for the temporal knowledge graph reasoning task.
-
- [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
- [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
- [[arXiv]](https://arxiv.org/abs/2205.14307)
-
- - Built over ICEWS and GDELT, which are widely used benchmarks in TKGC.
- - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
- - Please refer to the original paper for more details.
-
-
- """
- from dataclasses import dataclass
- from typing import List, Dict, Set, Optional, TypedDict
- import json
- import os
-
- import datasets
-
-
- _CITATION = """\
- @inproceedings{
- xueyuan2023tflex,
- title={TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph},
- author={Lin Xueyuan and Haihong E and Chengjin Xu and Gengxian Zhou and Haoran Luo and Tianyi Hu and Fenglong Su and Ningyuan Li and Mingzhi Sun},
- booktitle={Thirty-seventh Conference on Neural Information Processing Systems},
- year={2023},
- url={https://openreview.net/forum?id=oaGdsgB18L}
- }\
- """
-
- # TODO: Add description of the dataset here
- _DESCRIPTION = """\
- TL;DR: The datasets for the temporal knowledge graph reasoning task.
-
- [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
- [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
- [[arXiv]](https://arxiv.org/abs/2205.14307)
-
- - Built over ICEWS and GDELT, which are widely used benchmarks in TKGC.
- - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
- - Please refer to the original paper for more details.
- """
-
- _HOMEPAGE = "https://github.com/LinXueyuanStdio/TFLEX"
-
- _LICENSE = "[Apache License 2.0](https://github.com/LinXueyuanStdio/TFLEX/blob/main/LICENSE)"
-
- query_name_to_args: Dict[str, List[str]] = {
-     # 1. 1-hop Pe and Pt, manually
-     "Pe": ['e1', 'r1', 't1'],
-     "Pt": ['e1', 'r1', 'e2'],
-     # 2. entity multi-hop
-     "Pe2": ['e1', 'r1', 't1', 'r2', 't2'],
-     "Pe3": ['e1', 'r1', 't1', 'r2', 't2', 'r3', 't3'],
-     # 3. time multi-hop
-     "aPt": ['s', 'r', 'o'],
-     "bPt": ['s', 'r', 'o'],
-     "Pt_sPe": ['e1', 'r1', 't1', 'r2', 'e2'],
-     "Pt_oPe": ['e1', 'r1', 'e2', 'r2', 't1'],
-     "Pe_Pt": ['e1', 'r1', 'e2', 'r2', 'e3'],
-     "Pe_aPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
-     "Pe_bPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
-     "Pe_nPt": ['e1', 'r1', 'e2', 'r2', 'e3'],
-     "Pt_sPe_Pt": ['s1', 'r1', 's2', 'r2', 'o1', 'r3', 'o2'],
-     "Pt_oPe_Pt": ['s1', 'r1', 's2', 'r2', 's3', 'r3', 'o1'],
-     # 4. entity and & time and
-     "e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
-     "e3i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
-     "t2i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
-     "t3i": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
-     # 5. complex time and
-     "e2i_Pe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
-     "Pe_e2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
-     "Pt_se2i": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 'e3'],
-     "Pt_oe2i": ['e1', 'r1', 'e2', 'r2', 't1', 'e3', 'r3', 't2'],
-     "t2i_Pe": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
-     "Pe_t2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
-     "Pe_at2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
-     "Pe_bt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
-     "Pe_nt2i": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
-     "between": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
-     # 6. entity not
-     "e2i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
-     "e3i_N": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'e3', 'r3', 't3'],
-     "Pe_e2i_Pe_NPe": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
-     "e2i_NPe": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
-     "e2i_PeN": ['e1', 'r1', 't1', 'r2', 't2', 'e2', 'r3', 't3'],
-     # 7. time not
-     "t2i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
-     "t3i_N": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4', 'e5', 'r3', 'e6'],
-     "Pe_t2i_PtPe_NPt": ['e1', 'r1', 'e2', 'r2', 't2', 'r3', 'e3', 'e4', 'r4', 'e5'],
-     "t2i_NPt": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
-     "t2i_PtN": ['e1', 'r1', 't1', 'r2', 'e2', 'e3', 'r3', 'e4'],
-     # 8. entity union & time union
-     "e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2'],
-     "Pe_e2u": ['e1', 'r1', 't1', 'e2', 'r2', 't2', 'r3', 't3'],
-     "t2u": ['e1', 'r1', 'e2', 'e3', 'r2', 'e4'],
-     "Pe_t2u": ['e1', 'r1', 'e2', 'r2', 'e3', 'e4', 'r3', 'e5'],
- }
- query_structures: Dict[str, str] = {
-     # 1. 1-hop Pe and Pt, manually
-     "Pe": "def Pe(e1, r1, t1): return Pe(e1, r1, t1)", # 1p
-     "Pt": "def Pt(e1, r1, e2): return Pt(e1, r1, e2)", # 1p, temporal
-     # 2. entity multi-hop
-     "Pe2": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)", # 2p
-     "Pe3": "def Pe3(e1, r1, t1, r2, t2, r3, t3): return Pe(Pe(Pe(e1, r1, t1), r2, t2), r3, t3)", # 3p
-     # 3. time multi-hop
-     "aPt": "def aPt(s, r, o): return after(Pt(s, r, o))", # a for after
-     "bPt": "def bPt(s, r, o): return before(Pt(s, r, o))", # b for before
-     "Pt_lPe": "def Pt_lPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)", # l for left (as head entity)
-     "Pt_rPe": "def Pt_rPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))", # r for right (as tail entity)
-     "Pt_sPe": "def Pt_sPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)", # s for subject (as head entity)
-     "Pt_oPe": "def Pt_oPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))", # o for object (as tail entity)
-     "Pe_Pt": "def Pe_Pt(e1, r1, e2, r2, e3): return Pe(e1, r1, Pt(e2, r2, e3))", # at
-     "Pe_aPt": "def Pe_aPt(e1, r1, e2, r2, e3): return Pe(e1, r1, after(Pt(e2, r2, e3)))", # a for after
-     "Pe_bPt": "def Pe_bPt(e1, r1, e2, r2, e3): return Pe(e1, r1, before(Pt(e2, r2, e3)))", # b for before
-     "Pe_nPt": "def Pe_nPt(e1, r1, e2, r2, e3): return Pe(e1, r1, next(Pt(e2, r2, e3)))", # n for next
-     "Pt_sPe_Pt": "def Pt_sPe_Pt(s1, r1, s2, r2, o1, r3, o2): return Pt(Pe(s1, r1, Pt(s2, r2, o1)), r3, o2)",
-     "Pt_oPe_Pt": "def Pt_oPe_Pt(s1, r1, s2, r2, s3, r3, o1): return Pt(s1, r1, Pe(s2, r2, Pt(s3, r3, o1)))",
-     # 4. entity and & time and
-     "e2i": "def e2i(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Pe(e2, r2, t2))", # 2i
-     "e3i": "def e3i(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Pe(e3, r3, t3))", # 3i
-     "t2i": "def t2i(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), Pt(e3, r2, e4))", # t-2i
-     "t3i": "def t3i(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), Pt(e5, r3, e6))", # t-3i
-     # 5. complex time and
-     "e2i_Pe": "def e2i_Pe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Pe(e2, r3, t3))", # pi
-     "Pe_e2i": "def Pe_e2i(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(e2i(e1, r1, t1, e2, r2, t2), r3, t3)", # ip
-     "Pt_le2i": "def Pt_le2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)", # mix ip
-     "Pt_re2i": "def Pt_re2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))", # mix ip
-     "Pt_se2i": "def Pt_se2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)", # mix ip
-     "Pt_oe2i": "def Pt_oe2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))", # mix ip
-     "t2i_Pe": "def t2i_Pe(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), Pt(e3, r3, e4))", # t-pi
-     "Pe_t2i": "def Pe_t2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, t2i(e2, r2, e3, e4, r3, e5))", # t-ip
-     "Pe_at2i": "def Pe_at2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, after(t2i(e2, r2, e3, e4, r3, e5)))",
-     "Pe_bt2i": "def Pe_bt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, before(t2i(e2, r2, e3, e4, r3, e5)))",
-     "Pe_nt2i": "def Pe_nt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, next(t2i(e2, r2, e3, e4, r3, e5)))",
-     "between": "def between(e1, r1, e2, e3, r2, e4): return TimeAnd(after(Pt(e1, r1, e2)), before(Pt(e3, r2, e4)))", # between(t1, t2) == after t1 and before t2
-     # 6. entity not
-     "e2i_N": "def e2i_N(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2)))", # 2in
-     "e3i_N": "def e3i_N(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Not(Pe(e3, r3, t3)))", # 3in
-     "Pe_e2i_Pe_NPe": "def Pe_e2i_Pe_NPe(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2))), r3, t3)", # inp
-     "e2i_PeN": "def e2i_PeN(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Not(Pe(e2, r3, t3)))", # pin
-     "e2i_NPe": "def e2i_NPe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Not(Pe(Pe(e1, r1, t1), r2, t2)), Pe(e2, r3, t3))", # pni = e2i_N(Pe(e1, r1, t1), r2, t2, e2, r3, t3)
-     # 7. time not
-     "t2i_N": "def t2i_N(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), TimeNot(Pt(e3, r2, e4)))", # t-2in
-     "t3i_N": "def t3i_N(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), TimeNot(Pt(e5, r3, e6)))", # t-3in
-     "Pe_t2i_PtPe_NPt": "def Pe_t2i_PtPe_NPt(e1, r1, e2, r2, t2, r3, e3, e4, r4, e5): return Pe(e1, r1, TimeAnd(Pt(Pe(e2, r2, t2), r3, e3), TimeNot(Pt(e4, r4, e5))))", # t-inp
-     "t2i_PtN": "def t2i_PtN(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), TimeNot(Pt(e3, r3, e4)))", # t-pin
-     "t2i_NPt": "def t2i_NPt(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(TimeNot(Pt(Pe(e1, r1, t1), r2, e2)), Pt(e3, r3, e4))", # t-pni
-     # 8. entity union & time union
-     "e2u": "def e2u(e1, r1, t1, e2, r2, t2): return Or(Pe(e1, r1, t1), Pe(e2, r2, t2))", # 2u
-     "Pe_e2u": "def Pe_e2u(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Or(Pe(e1, r1, t1), Pe(e2, r2, t2)), r3, t3)", # up
-     "t2u": "def t2u(e1, r1, e2, e3, r2, e4): return TimeOr(Pt(e1, r1, e2), Pt(e3, r2, e4))", # t-2u
-     "Pe_t2u": "def Pe_t2u(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeOr(Pt(e2, r2, e3), Pt(e4, r3, e5)))", # t-up
-     # 9. union-DM
-     "e2u_DM": "def e2u_DM(e1, r1, t1, e2, r2, t2): return Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2))))", # 2u-DM
-     "Pe_e2u_DM": "def Pe_e2u_DM(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2)))), r3, t3)", # up-DM
-     "t2u_DM": "def t2u_DM(e1, r1, e2, e3, r2, e4): return TimeNot(TimeAnd(TimeNot(Pt(e1, r1, e2)), TimeNot(Pt(e3, r2, e4))))", # t-2u-DM
-     "Pe_t2u_DM": "def Pe_t2u_DM(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeNot(TimeAnd(TimeNot(Pt(e2, r2, e3)), TimeNot(Pt(e4, r3, e5)))))", # t-up-DM
-     # 10. union-DNF
-     "e2u_DNF": "def e2u_DNF(e1, r1, t1, e2, r2, t2): return Pe(e1, r1, t1), Pe(e2, r2, t2)", # 2u_DNF
-     "Pe_e2u_DNF": "def Pe_e2u_DNF(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Pe(e1, r1, t1), r3, t3), Pe(Pe(e2, r2, t2), r3, t3)", # up_DNF
-     "t2u_DNF": "def t2u_DNF(e1, r1, e2, e3, r2, e4): return Pt(e1, r1, e2), Pt(e3, r2, e4)", # t-2u_DNF
-     "Pe_t2u_DNF": "def Pe_t2u_DNF(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, Pt(e2, r2, e3)), Pe(e1, r1, Pt(e4, r3, e5))", # t-up_DNF
- }
- union_query_structures: List[str] = [
-     "e2u", "Pe_e2u", # 2u, up
-     "t2u", "Pe_t2u", # t-2u, t-up
- ]
- train_query_structures: List[str] = [
-     # entity
-     "Pe", "Pe2", "Pe3", "e2i", "e3i", # 1p, 2p, 3p, 2i, 3i
-     "e2i_NPe", "e2i_PeN", "Pe_e2i_Pe_NPe", "e2i_N", "e3i_N", # npi, pni, inp, 2in, 3in
-     # time
-     "Pt", "Pt_lPe", "Pt_rPe", "Pe_Pt", "Pe_aPt", "Pe_bPt", "Pe_nPt", # t-1p, t-2p
-     "t2i", "t3i", "Pt_le2i", "Pt_re2i", "Pe_t2i", "Pe_at2i", "Pe_bt2i", "Pe_nt2i", "between", # t-2i, t-3i
-     "t2i_NPt", "t2i_PtN", "Pe_t2i_PtPe_NPt", "t2i_N", "t3i_N", # t-npi, t-pni, t-inp, t-2in, t-3in
- ]
- test_query_structures: List[str] = train_query_structures + [
-     # entity
-     "e2i_Pe", "Pe_e2i", # pi, ip
-     "e2u", "Pe_e2u", # 2u, up
-     # time
-     "t2i_Pe", "Pe_t2i", # t-pi, t-ip
-     "t2u", "Pe_t2u", # t-2u, t-up
-     # union-DM
-     "e2u_DM", "Pe_e2u_DM", # 2u-DM, up-DM
-     "t2u_DM", "Pe_t2u_DM", # t-2u-DM, t-up-DM
- ]
-
-
- # TODO: Add link to the official dataset URLs here
- # The HuggingFace Datasets library doesn't host the datasets but only points to the original files.
- # This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
- _HOST = "https://huggingface.co/datasets"
- _AUTHOR = "linxy"
- _DATASET = "ICEWS14"
- _URLS = {
-     name: f"{_HOST}/{_AUTHOR}/{_DATASET}/resolve/main/zips/{name}.zip?download=true"
-     for name in ["all"] + list(query_name_to_args.keys())
- }
-
-
- class QueryData(TypedDict):
-     """
-     saved in training split: query_name, query, answer
-     saved in valid or test split: query_name, query, answer, easy_answer
-     iterating training dataloader: query_name, query, answer, args, definition
-     iterating valid or test dataloader: query_name, query, answer, easy_answer, args, definition
-     """
-     query_name: str
-     query: List[int]
-     answer: Set[int]
-     easy_answer: Optional[Set[int]] = None # may be empty, indicating that no easy answer exists in training graph.
-     args: Optional[List[str]] = None
-     definition: Optional[str] = None
-
- @dataclass
- class TKGRBuilderConfig(datasets.BuilderConfig):
-     """BuilderConfig for TKGR (Temporal Knowledge Graph Reasoning)."""
-     query_structure_name: str = "default"
-
- class ICEWS14Dataset(datasets.GeneratorBasedBuilder):
-     """TODO: Short description of my dataset."""
-
-     VERSION = datasets.Version("1.0.0")
-
-     # This is an example of a dataset with multiple configurations.
-     # If you don't want/need to define several sub-sets in your dataset,
-     # just remove the BUILDER_CONFIG_CLASS and the BUILDER_CONFIGS attributes.
-
-     # If you need to make complex sub-parts in the datasets with configurable options
-     # You can create your own builder configuration class to store attributes, inheriting from datasets.BuilderConfig
-     # BUILDER_CONFIG_CLASS = MyBuilderConfig
-
-     # You will be able to load one or the other configurations in the following list with
-     # data = datasets.load_dataset('my_dataset', 'first_domain')
-     # data = datasets.load_dataset('my_dataset', 'second_domain')
-     STANDARD_BUILDER_CONFIGS = [
-         datasets.BuilderConfig(
-             name=query_name,
-             version=datasets.Version("1.0.0"),
-             description=query_structures[query_name],
-         )
-         for query_name in list(query_name_to_args.keys())
-     ]
-     BUILDER_CONFIGS = [
-         datasets.BuilderConfig(
-             name="all",
-             version=VERSION,
-             description=f"All types of queries. Train: {train_query_structures}, Valid | Test: {test_query_structures}",
-         )
-     ] + STANDARD_BUILDER_CONFIGS
-
-     DEFAULT_CONFIG_NAME = "all" # It's not mandatory to have a default configuration. Just use one if it makes sense.
-
-     def _info(self):
-         if self.config.name == "all": # This is the name of the configuration selected in BUILDER_CONFIGS above
-             features = datasets.Features(
-                 {
-                     "query_name": datasets.Value("string"),
-                     "definition": datasets.Value("string"),
-                     "query": datasets.Sequence(feature=datasets.Value("int32")),
-                     "answer": datasets.Sequence(feature=datasets.Value("int32")),
-                     "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
-                     "args": datasets.Sequence(feature=datasets.Value("string")),
-                 }
-             )
-         else:
-             features = datasets.Features(
-                 {
-                     "query_name": datasets.Value("string"),
-                     "definition": datasets.Value("string"),
-                     "query": datasets.Sequence(feature=datasets.Value("int32")),
-                     "answer": datasets.Sequence(feature=datasets.Value("int32")),
-                     "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
-                     "args": datasets.Sequence(feature=datasets.Value("string")),
-                 }
-             )
-         return datasets.DatasetInfo(
-             description=_DESCRIPTION,
-             features=features,
-             homepage=_HOMEPAGE,
-             license=_LICENSE,
-             citation=_CITATION,
-         )
-
-     def _split_generators(self, dl_manager):
-         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLs
-         # It can accept any type or nested list/dict and will give back the same structure with each URL replaced with a path to the local files.
-         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
-         urls = _URLS[self.config.name]
-         data_dir = dl_manager.download_and_extract(urls)
-         return [
-             datasets.SplitGenerator(
-                 name=datasets.Split.TRAIN,
-                 # These kwargs will be passed to _generate_examples
-                 gen_kwargs={
-                     "filepath": os.path.join(data_dir, "train.jsonl"),
-                     "split": "train",
-                 },
-             ),
-             datasets.SplitGenerator(
-                 name=datasets.Split.VALIDATION,
-                 # These kwargs will be passed to _generate_examples
-                 gen_kwargs={
-                     "filepath": os.path.join(data_dir, "valid.jsonl"),
-                     "split": "valid",
-                 },
-             ),
-             datasets.SplitGenerator(
-                 name=datasets.Split.TEST,
-                 # These kwargs will be passed to _generate_examples
-                 gen_kwargs={
-                     "filepath": os.path.join(data_dir, "test.jsonl"),
-                     "split": "test"
-                 },
-             ),
-         ]
-
-     def _generate_examples(self, filepath, split):
-         # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
-         # This method yields (key, example) tuples from the dataset.
-         # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
-         if not os.path.exists(filepath):
-             return
-         with open(filepath, encoding="utf-8") as f:
-             for key, row in enumerate(f):
-                 data = json.loads(row)
-                 query_name = data["query_name"]
-                 if self.config.name == "all":
-                     yield key, {
-                         "query_name": query_name,
-                         "query": data["query"],
-                         "answer": data["answer"],
-                         "easy_answer": data["easy_answer"] if "easy_answer" in data else None,
-                         "args": query_name_to_args[query_name],
-                         "definition": query_structures[query_name],
-                     }
-                 else:
-                     yield key, {
-                         "query_name": query_name,
-                         "query": data["query"],
-                         "answer": data["answer"],
-                         "easy_answer": data["easy_answer"] if "easy_answer" in data else None,
-                         "args": query_name_to_args[query_name],
-                         "definition": query_structures[query_name],
-                     }
+ # Copyright 2023 Xueyuan Lin
+ # Apache 2.0 License
+ """Loading script for ICEWS14."""
+ from typing import List, Dict
+ import json
+ import os
+ from huggingface_hub import hf_hub_url
+ import datasets
+
+
+ _CITATION = """\
+ @inproceedings{
+ xueyuan2023tflex,
+ title={TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph},
+ author={Lin Xueyuan and Haihong E and Chengjin Xu and Gengxian Zhou and Haoran Luo and Tianyi Hu and Fenglong Su and Ningyuan Li and Mingzhi Sun},
+ booktitle={Thirty-seventh Conference on Neural Information Processing Systems},
+ year={2023},
+ url={https://openreview.net/forum?id=oaGdsgB18L}
+ }\
+ """
+
+ _DESCRIPTION = """\
+ TL;DR: The datasets for the temporal knowledge graph reasoning task.
+
+ [[Github]](https://github.com/LinXueyuanStdio/TFLEX)
+ [[OpenReview]](https://openreview.net/forum?id=oaGdsgB18L)
+ [[arXiv]](https://arxiv.org/abs/2205.14307)
+
+ - Built over ICEWS and GDELT, which are widely used benchmarks in TKGC.
+ - First introduced in the paper "TFLEX: Temporal Feature-Logic Embedding Framework for Complex Reasoning over Temporal Knowledge Graph".
+ - Please refer to the original paper for more details.
+ """
+
+ _HOMEPAGE = "https://github.com/LinXueyuanStdio/TFLEX"
+
+ _LICENSE = "[Apache License 2.0](https://github.com/LinXueyuanStdio/TFLEX/blob/main/LICENSE)"
+
+ query_name_to_args: Dict[str, List[str]] = {
+     # 1. 1-hop Pe and Pt, manually
+     "Pe": ["e1", "r1", "t1"],
+     "Pt": ["e1", "r1", "e2"],
+     # 2. entity multi-hop
+     "Pe2": ["e1", "r1", "t1", "r2", "t2"],
+     "Pe3": ["e1", "r1", "t1", "r2", "t2", "r3", "t3"],
+     # 3. time multi-hop
+     "aPt": ["s", "r", "o"],
+     "bPt": ["s", "r", "o"],
+     "Pt_sPe": ["e1", "r1", "t1", "r2", "e2"],
+     "Pt_oPe": ["e1", "r1", "e2", "r2", "t1"],
+     "Pe_Pt": ["e1", "r1", "e2", "r2", "e3"],
+     "Pe_aPt": ["e1", "r1", "e2", "r2", "e3"],
+     "Pe_bPt": ["e1", "r1", "e2", "r2", "e3"],
+     "Pe_nPt": ["e1", "r1", "e2", "r2", "e3"],
+     "Pt_sPe_Pt": ["s1", "r1", "s2", "r2", "o1", "r3", "o2"],
+     "Pt_oPe_Pt": ["s1", "r1", "s2", "r2", "s3", "r3", "o1"],
+     # 4. entity and & time and
+     "e2i": ["e1", "r1", "t1", "e2", "r2", "t2"],
+     "e3i": ["e1", "r1", "t1", "e2", "r2", "t2", "e3", "r3", "t3"],
+     "t2i": ["e1", "r1", "e2", "e3", "r2", "e4"],
+     "t3i": ["e1", "r1", "e2", "e3", "r2", "e4", "e5", "r3", "e6"],
+     # 5. complex time and
+     "e2i_Pe": ["e1", "r1", "t1", "r2", "t2", "e2", "r3", "t3"],
+     "Pe_e2i": ["e1", "r1", "t1", "e2", "r2", "t2", "r3", "t3"],
+     "Pt_se2i": ["e1", "r1", "t1", "e2", "r2", "t2", "r3", "e3"],
+     "Pt_oe2i": ["e1", "r1", "e2", "r2", "t1", "e3", "r3", "t2"],
+     "t2i_Pe": ["e1", "r1", "t1", "r2", "e2", "e3", "r3", "e4"],
+     "Pe_t2i": ["e1", "r1", "e2", "r2", "e3", "e4", "r3", "e5"],
+     "Pe_at2i": ["e1", "r1", "e2", "r2", "e3", "e4", "r3", "e5"],
+     "Pe_bt2i": ["e1", "r1", "e2", "r2", "e3", "e4", "r3", "e5"],
+     "Pe_nt2i": ["e1", "r1", "e2", "r2", "e3", "e4", "r3", "e5"],
+     "between": ["e1", "r1", "e2", "e3", "r2", "e4"],
+     # 6. entity not
+     "e2i_N": ["e1", "r1", "t1", "e2", "r2", "t2"],
+     "e3i_N": ["e1", "r1", "t1", "e2", "r2", "t2", "e3", "r3", "t3"],
+     "Pe_e2i_Pe_NPe": ["e1", "r1", "t1", "e2", "r2", "t2", "r3", "t3"],
+     "e2i_NPe": ["e1", "r1", "t1", "r2", "t2", "e2", "r3", "t3"],
+     "e2i_PeN": ["e1", "r1", "t1", "r2", "t2", "e2", "r3", "t3"],
+     # 7. time not
+     "t2i_N": ["e1", "r1", "e2", "e3", "r2", "e4"],
+     "t3i_N": ["e1", "r1", "e2", "e3", "r2", "e4", "e5", "r3", "e6"],
+     "Pe_t2i_PtPe_NPt": ["e1", "r1", "e2", "r2", "t2", "r3", "e3", "e4", "r4", "e5"],
+     "t2i_NPt": ["e1", "r1", "t1", "r2", "e2", "e3", "r3", "e4"],
+     "t2i_PtN": ["e1", "r1", "t1", "r2", "e2", "e3", "r3", "e4"],
+     # 8. entity union & time union
+     "e2u": ["e1", "r1", "t1", "e2", "r2", "t2"],
+     "Pe_e2u": ["e1", "r1", "t1", "e2", "r2", "t2", "r3", "t3"],
+     "t2u": ["e1", "r1", "e2", "e3", "r2", "e4"],
+     "Pe_t2u": ["e1", "r1", "e2", "r2", "e3", "e4", "r3", "e5"],
+ }
+ query_structures: Dict[str, str] = {
+     # 1. 1-hop Pe and Pt, manually
+     "Pe": "def Pe(e1, r1, t1): return Pe(e1, r1, t1)", # 1p
+     "Pt": "def Pt(e1, r1, e2): return Pt(e1, r1, e2)", # 1p, temporal
+     # 2. entity multi-hop
+     "Pe2": "def Pe2(e1, r1, t1, r2, t2): return Pe(Pe(e1, r1, t1), r2, t2)", # 2p
+     "Pe3": "def Pe3(e1, r1, t1, r2, t2, r3, t3): return Pe(Pe(Pe(e1, r1, t1), r2, t2), r3, t3)", # 3p
+     # 3. time multi-hop
+     "aPt": "def aPt(s, r, o): return after(Pt(s, r, o))", # a for after
+     "bPt": "def bPt(s, r, o): return before(Pt(s, r, o))", # b for before
+     "Pt_lPe": "def Pt_lPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)", # l for left (as head entity)
+     "Pt_rPe": "def Pt_rPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))", # r for right (as tail entity)
+     "Pt_sPe": "def Pt_sPe(e1, r1, t1, r2, e2): return Pt(Pe(e1, r1, t1), r2, e2)", # s for subject (as head entity)
+     "Pt_oPe": "def Pt_oPe(e1, r1, e2, r2, t1): return Pt(e1, r1, Pe(e2, r2, t1))", # o for object (as tail entity)
+     "Pe_Pt": "def Pe_Pt(e1, r1, e2, r2, e3): return Pe(e1, r1, Pt(e2, r2, e3))", # at
+     "Pe_aPt": "def Pe_aPt(e1, r1, e2, r2, e3): return Pe(e1, r1, after(Pt(e2, r2, e3)))", # a for after
+     "Pe_bPt": "def Pe_bPt(e1, r1, e2, r2, e3): return Pe(e1, r1, before(Pt(e2, r2, e3)))", # b for before
+     "Pe_nPt": "def Pe_nPt(e1, r1, e2, r2, e3): return Pe(e1, r1, next(Pt(e2, r2, e3)))", # n for next
+     "Pt_sPe_Pt": "def Pt_sPe_Pt(s1, r1, s2, r2, o1, r3, o2): return Pt(Pe(s1, r1, Pt(s2, r2, o1)), r3, o2)",
+     "Pt_oPe_Pt": "def Pt_oPe_Pt(s1, r1, s2, r2, s3, r3, o1): return Pt(s1, r1, Pe(s2, r2, Pt(s3, r3, o1)))",
+     # 4. entity and & time and
+     "e2i": "def e2i(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Pe(e2, r2, t2))", # 2i
+     "e3i": "def e3i(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Pe(e3, r3, t3))", # 3i
+     "t2i": "def t2i(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), Pt(e3, r2, e4))", # t-2i
+     "t3i": "def t3i(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), Pt(e5, r3, e6))", # t-3i
+     # 5. complex time and
+     "e2i_Pe": "def e2i_Pe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Pe(e2, r3, t3))", # pi
+     "Pe_e2i": "def Pe_e2i(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(e2i(e1, r1, t1, e2, r2, t2), r3, t3)", # ip
+     "Pt_le2i": "def Pt_le2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)", # mix ip
+     "Pt_re2i": "def Pt_re2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))", # mix ip
+     "Pt_se2i": "def Pt_se2i(e1, r1, t1, e2, r2, t2, r3, e3): return Pt(e2i(e1, r1, t1, e2, r2, t2), r3, e3)", # mix ip
+     "Pt_oe2i": "def Pt_oe2i(e1, r1, e2, r2, t1, e3, r3, t2): return Pt(e1, r1, e2i(e2, r2, t1, e3, r3, t2))", # mix ip
+     "t2i_Pe": "def t2i_Pe(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), Pt(e3, r3, e4))", # t-pi
+     "Pe_t2i": "def Pe_t2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, t2i(e2, r2, e3, e4, r3, e5))", # t-ip
+     "Pe_at2i": "def Pe_at2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, after(t2i(e2, r2, e3, e4, r3, e5)))",
+     "Pe_bt2i": "def Pe_bt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, before(t2i(e2, r2, e3, e4, r3, e5)))",
+     "Pe_nt2i": "def Pe_nt2i(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, next(t2i(e2, r2, e3, e4, r3, e5)))",
+     "between": "def between(e1, r1, e2, e3, r2, e4): return TimeAnd(after(Pt(e1, r1, e2)), before(Pt(e3, r2, e4)))", # between(t1, t2) == after t1 and before t2
+     # 6. entity not
+     "e2i_N": "def e2i_N(e1, r1, t1, e2, r2, t2): return And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2)))", # 2in
+     "e3i_N": "def e3i_N(e1, r1, t1, e2, r2, t2, e3, r3, t3): return And3(Pe(e1, r1, t1), Pe(e2, r2, t2), Not(Pe(e3, r3, t3)))", # 3in
+     "Pe_e2i_Pe_NPe": "def Pe_e2i_Pe_NPe(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(And(Pe(e1, r1, t1), Not(Pe(e2, r2, t2))), r3, t3)", # inp
+     "e2i_PeN": "def e2i_PeN(e1, r1, t1, r2, t2, e2, r3, t3): return And(Pe(Pe(e1, r1, t1), r2, t2), Not(Pe(e2, r3, t3)))", # pin
+     "e2i_NPe": "def e2i_NPe(e1, r1, t1, r2, t2, e2, r3, t3): return And(Not(Pe(Pe(e1, r1, t1), r2, t2)), Pe(e2, r3, t3))", # pni = e2i_N(Pe(e1, r1, t1), r2, t2, e2, r3, t3)
+     # 7. time not
+     "t2i_N": "def t2i_N(e1, r1, e2, e3, r2, e4): return TimeAnd(Pt(e1, r1, e2), TimeNot(Pt(e3, r2, e4)))", # t-2in
+     "t3i_N": "def t3i_N(e1, r1, e2, e3, r2, e4, e5, r3, e6): return TimeAnd3(Pt(e1, r1, e2), Pt(e3, r2, e4), TimeNot(Pt(e5, r3, e6)))", # t-3in
+     "Pe_t2i_PtPe_NPt": "def Pe_t2i_PtPe_NPt(e1, r1, e2, r2, t2, r3, e3, e4, r4, e5): return Pe(e1, r1, TimeAnd(Pt(Pe(e2, r2, t2), r3, e3), TimeNot(Pt(e4, r4, e5))))", # t-inp
+     "t2i_PtN": "def t2i_PtN(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(Pt(Pe(e1, r1, t1), r2, e2), TimeNot(Pt(e3, r3, e4)))", # t-pin
+     "t2i_NPt": "def t2i_NPt(e1, r1, t1, r2, e2, e3, r3, e4): return TimeAnd(TimeNot(Pt(Pe(e1, r1, t1), r2, e2)), Pt(e3, r3, e4))", # t-pni
+     # 8. entity union & time union
+     "e2u": "def e2u(e1, r1, t1, e2, r2, t2): return Or(Pe(e1, r1, t1), Pe(e2, r2, t2))", # 2u
+     "Pe_e2u": "def Pe_e2u(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Or(Pe(e1, r1, t1), Pe(e2, r2, t2)), r3, t3)", # up
+     "t2u": "def t2u(e1, r1, e2, e3, r2, e4): return TimeOr(Pt(e1, r1, e2), Pt(e3, r2, e4))", # t-2u
+     "Pe_t2u": "def Pe_t2u(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeOr(Pt(e2, r2, e3), Pt(e4, r3, e5)))", # t-up
+     # 9. union-DM
+     "e2u_DM": "def e2u_DM(e1, r1, t1, e2, r2, t2): return Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2))))", # 2u-DM
+     "Pe_e2u_DM": "def Pe_e2u_DM(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Not(And(Not(Pe(e1, r1, t1)), Not(Pe(e2, r2, t2)))), r3, t3)", # up-DM
+     "t2u_DM": "def t2u_DM(e1, r1, e2, e3, r2, e4): return TimeNot(TimeAnd(TimeNot(Pt(e1, r1, e2)), TimeNot(Pt(e3, r2, e4))))", # t-2u-DM
+     "Pe_t2u_DM": "def Pe_t2u_DM(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, TimeNot(TimeAnd(TimeNot(Pt(e2, r2, e3)), TimeNot(Pt(e4, r3, e5)))))", # t-up-DM
+     # 10. union-DNF
+     "e2u_DNF": "def e2u_DNF(e1, r1, t1, e2, r2, t2): return Pe(e1, r1, t1), Pe(e2, r2, t2)", # 2u_DNF
+     "Pe_e2u_DNF": "def Pe_e2u_DNF(e1, r1, t1, e2, r2, t2, r3, t3): return Pe(Pe(e1, r1, t1), r3, t3), Pe(Pe(e2, r2, t2), r3, t3)", # up_DNF
+     "t2u_DNF": "def t2u_DNF(e1, r1, e2, e3, r2, e4): return Pt(e1, r1, e2), Pt(e3, r2, e4)", # t-2u_DNF
+     "Pe_t2u_DNF": "def Pe_t2u_DNF(e1, r1, e2, r2, e3, e4, r3, e5): return Pe(e1, r1, Pt(e2, r2, e3)), Pe(e1, r1, Pt(e4, r3, e5))", # t-up_DNF
+ }
+ union_query_structures: List[str] = [
+     "e2u",
+     "Pe_e2u", # 2u, up
+     "t2u",
+     "Pe_t2u", # t-2u, t-up
+ ]
+ train_query_structures: List[str] = [
+     # entity
+     "Pe",
+     "Pe2",
+     "Pe3",
+     "e2i",
+     "e3i", # 1p, 2p, 3p, 2i, 3i
+     "e2i_NPe",
+     "e2i_PeN",
+     "Pe_e2i_Pe_NPe",
+     "e2i_N",
+     "e3i_N", # npi, pni, inp, 2in, 3in
+     # time
+     "Pt",
+     "Pt_lPe",
+     "Pt_rPe",
+     "Pe_Pt",
+     "Pe_aPt",
+     "Pe_bPt",
+     "Pe_nPt", # t-1p, t-2p
+     "t2i",
+     "t3i",
+     "Pt_le2i",
+     "Pt_re2i",
+     "Pe_t2i",
+     "Pe_at2i",
+     "Pe_bt2i",
+     "Pe_nt2i",
+     "between", # t-2i, t-3i
+     "t2i_NPt",
+     "t2i_PtN",
+     "Pe_t2i_PtPe_NPt",
+     "t2i_N",
+     "t3i_N", # t-npi, t-pni, t-inp, t-2in, t-3in
+ ]
+ test_query_structures: List[str] = train_query_structures + [
+     # entity
+     "e2i_Pe",
+     "Pe_e2i", # pi, ip
+     "e2u",
+     "Pe_e2u", # 2u, up
+     # time
+     "t2i_Pe",
+     "Pe_t2i", # t-pi, t-ip
+     "t2u",
+     "Pe_t2u", # t-2u, t-up
+     # union-DM
+     "e2u_DM",
+     "Pe_e2u_DM", # 2u-DM, up-DM
+     "t2u_DM",
+     "Pe_t2u_DM", # t-2u-DM, t-up-DM
+ ]
+
+
+ _AUTHOR = "linxy"
+ _DATASET = "ICEWS14"
+ _URLS = {
+     name: hf_hub_url(f"{_AUTHOR}/{_DATASET}", filename=f"zips/{name}.zip", repo_type="dataset")
+     for name in ["all"] + list(query_name_to_args.keys())
+ } | {
+     "meta": hf_hub_url(f"{_AUTHOR}/{_DATASET}", filename="meta.json", repo_type="dataset")
+ }
+
+
+ class ICEWS14Dataset(datasets.GeneratorBasedBuilder):
+     VERSION = datasets.Version("1.0.0")
+
+     STANDARD_BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name=query_name,
+             version=datasets.Version("1.0.0"),
+             description=query_structures[query_name],
+         )
+         for query_name in list(query_name_to_args.keys())
+     ]
+     BUILDER_CONFIGS = [
+         datasets.BuilderConfig(
+             name="meta",
+             version=VERSION,
+             description="The dataset metadata, including entity/relation/timestamp counts, entity2idx, relation2idx, timestamp2idx, etc.",
+         ),
+         datasets.BuilderConfig(
+             name="all",
+             version=VERSION,
+             description=f"All types of queries. Train: {train_query_structures}, Valid | Test: {test_query_structures}",
+         ),
+     ] + STANDARD_BUILDER_CONFIGS
+
+     DEFAULT_CONFIG_NAME = "all" # It's not mandatory to have a default configuration. Just use one if it makes sense.
+
+     def _info(self):
+         if self.config.name == "meta":
+             features = datasets.Features(
+                 {
+                     "dataset": datasets.Value("string"),
+                     "entity_count": datasets.Value("int32"),
+                     "relation_count": datasets.Value("int32"),
+                     "timestamp_count": datasets.Value("int32"),
+                     "valid_triples_count": datasets.Value("int32"),
+                     "test_triples_count": datasets.Value("int32"),
+                     "train_triples_count": datasets.Value("int32"),
+                     "triple_count": datasets.Value("int32"),
+                     "query_meta": datasets.Sequence(
+                         feature={
+                             "query_name": datasets.Value("string"),
+                             "queries_count": datasets.Value("int32"),
+                             "avg_answers_count": datasets.Value("float"),
+                             "train": {
+                                 "queries_count": datasets.Value("int32"),
+                                 "avg_answers_count": datasets.Value("float"),
+                             },
+                             "valid": {
+                                 "queries_count": datasets.Value("int32"),
+                                 "avg_answers_count": datasets.Value("float"),
+                             },
+                             "test": {
+                                 "queries_count": datasets.Value("int32"),
+                                 "avg_answers_count": datasets.Value("float"),
+                             },
+                         }
+                     ),
+                     "entity2idx": datasets.Sequence(
+                         feature={
+                             "name": datasets.Value("string"),
+                             "id": datasets.Value("int32"),
+                         }
+                     ),
+                     "relation2idx": datasets.Sequence(
+                         feature={
+                             "name": datasets.Value("string"),
+                             "id": datasets.Value("int32"),
+                         }
+                     ),
+                     "timestamp2idx": datasets.Sequence(
+                         feature={
+                             "name": datasets.Value("string"),
+                             "id": datasets.Value("int32"),
+                         }
+                     ),
+                 }
+             )
+         else:
+             features = datasets.Features(
+                 {
+                     "query_name": datasets.Value("string"),
+                     "definition": datasets.Value("string"),
+                     "query": datasets.Sequence(feature=datasets.Value("int32")),
+                     "answer": datasets.Sequence(feature=datasets.Value("int32")),
+                     "easy_answer": datasets.Sequence(feature=datasets.Value("int32")),
+                     "args": datasets.Sequence(feature=datasets.Value("string")),
+                 }
+             )
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=features,
+             homepage=_HOMEPAGE,
+             license=_LICENSE,
+             citation=_CITATION,
+         )
+
+     def _split_generators(self, dl_manager: datasets.download.DownloadManager):
+         # dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLs
+         # It can accept any type or nested list/dict and will give back the same structure with each URL replaced with a path to the local files.
+         # By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
+         url = _URLS[self.config.name]
+         if self.config.name == "meta":
+             data_file = dl_manager.download(_URLS["meta"])
+             return [
+                 datasets.SplitGenerator(
+                     name=datasets.Split.TRAIN,
+                     # These kwargs will be passed to _generate_examples
+                     gen_kwargs={
+                         "filepath": data_file,
+                         "split": "meta",
+                     },
+                 )
+             ]
+         data_dir = dl_manager.download_and_extract(url)
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 # These kwargs will be passed to _generate_examples
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "train.jsonl"),
+                     "split": "train",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.VALIDATION,
+                 # These kwargs will be passed to _generate_examples
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "valid.jsonl"),
+                     "split": "valid",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 # These kwargs will be passed to _generate_examples
+                 gen_kwargs={
+                     "filepath": os.path.join(data_dir, "test.jsonl"),
+                     "split": "test",
+                 },
+             ),
+         ]
+
+     def _generate_examples(self, filepath, split):
+         # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
+         # This method yields (key, example) tuples from the dataset.
+         # The `key` is for legacy reasons (tfds) and is not important in itself, but must be unique for each example.
+         if not os.path.exists(filepath):
+             return
+         if split == "meta":
+             with open(filepath, "r", encoding="utf-8") as f:
+                 data = json.load(f)
+             yield 0, data
+             return
+         with open(filepath, "r", encoding="utf-8") as f:
+             for key, row in enumerate(f):
+                 data = json.loads(row)
+                 query_name = data["query_name"]
+                 easy_answer = data["easy_answer"] if "easy_answer" in data else []
+                 yield key, {
+                     "query_name": query_name,
+                     "query": data["query"],
+                     "answer": data["answer"],
+                     "easy_answer": easy_answer,
+                     "args": query_name_to_args[query_name],
+                     "definition": query_structures[query_name],
+                 }
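
For reference, a minimal usage sketch of the script after this commit. This is not part of the commit itself; it assumes the `linxy/ICEWS14` repo on the Hugging Face Hub serves this script and that the installed `datasets` release still executes loading scripts (2.x releases typically require `trust_remote_code=True` for that).

# Minimal usage sketch (assumptions: the linxy/ICEWS14 Hub repo serves this
# script, and the installed `datasets` release still supports loading
# scripts; on 2.x releases that typically needs trust_remote_code=True).
from datasets import load_dataset

# One query type per config: each example carries the query tokens, the
# answer ids, the argument names, and the textual structure definition.
pe = load_dataset("linxy/ICEWS14", "Pe", trust_remote_code=True)
sample = pe["train"][0]
print(sample["query_name"], sample["args"])  # Pe ['e1', 'r1', 't1']
print(sample["definition"])  # def Pe(e1, r1, t1): return Pe(e1, r1, t1)

# "all" mixes every query type listed in train/test_query_structures.
all_splits = load_dataset("linxy/ICEWS14", "all", trust_remote_code=True)

# The new "meta" config is a single-row train split with vocabulary sizes
# and name<->id mappings.
meta = load_dataset("linxy/ICEWS14", "meta", trust_remote_code=True)["train"][0]
print(meta["entity_count"], meta["relation_count"], meta["timestamp_count"])

Besides the new "meta" config, the main plumbing change in this commit is resolving the per-config zips and meta.json through `hf_hub_url` instead of the hand-built `.../resolve/main/...?download=true` URL.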