kimsan0622 committed
Commit 4fa57a6
1 Parent(s): 5624e2c

initial commit

Files changed (2)
  1. README.md +18 -0
  2. korquad.py +319 -0
README.md ADDED
@@ -0,0 +1,18 @@
<!--
Copyright 2021 san kim

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->

# KorQuAD
korquad.py ADDED
@@ -0,0 +1,319 @@
# Copyright 2021 san kim
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# modified by kimsan0622@keti.re.kr
"""korquad dataset."""
import os
import json
import copy
import glob
import hashlib
import functools

import datasets

# KorQuAD: https://korquad.github.io/
# ---------------------------------------------
_KORQUAD_URL = 'https://korquad.github.io/'
# https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_v1.0_dev.json
_KORQUAD_ROOT = 'https://github.com/korquad/korquad.github.io/raw/master/dataset/'
_KORQUADV1_TRAIN_LINK = [os.path.join(_KORQUAD_ROOT, 'KorQuAD_v1.0_train.json')]
_KORQUADV1_DEV_LINK = [os.path.join(_KORQUAD_ROOT, 'KorQuAD_v1.0_dev.json')]
_KORQUADV1_DEFAULT_SPLIT = {'train': _KORQUADV1_TRAIN_LINK, 'dev': _KORQUADV1_DEV_LINK}
_KORQUADV1_DESCRIPTION = """
KorQuAD1.0
"""
_KORQUADV1_CITATION = """
@article{DBLP:journals/corr/abs-1909-07005,
  author        = {Seungyoung Lim and
                   Myungji Kim and
                   Jooyoul Lee},
  title         = {KorQuAD1.0: Korean {QA} Dataset for Machine Reading Comprehension},
  journal       = {CoRR},
  volume        = {abs/1909.07005},
  year          = {2019},
  url           = {http://arxiv.org/abs/1909.07005},
  archivePrefix = {arXiv},
  eprint        = {1909.07005},
  timestamp     = {Mon, 23 Sep 2019 18:07:15 +0200},
  biburl        = {https://dblp.org/rec/journals/corr/abs-1909-07005.bib},
  bibsource     = {dblp computer science bibliography, https://dblp.org}
}
"""

# https://github.com/korquad/korquad.github.io/raw/master/dataset/KorQuAD_2.1/train/KorQuAD_2.1_train_00.zip
_KORQUADV2_TRAIN_LINK = [os.path.join(_KORQUAD_ROOT, 'KorQuAD_2.1/train', 'KorQuAD_2.1_train_{0:02d}.zip'.format(idx)) for idx in range(13)]
_KORQUADV2_DEV_LINK = [os.path.join(_KORQUAD_ROOT, 'KorQuAD_2.1/dev', 'KorQuAD_2.1_dev_{0:02d}.zip'.format(idx)) for idx in range(2)]
_KORQUADV2_DEFAULT_SPLIT = {'train': _KORQUADV2_TRAIN_LINK, 'dev': _KORQUADV2_DEV_LINK}
_KORQUADV2_DESCRIPTION = """
KorQuAD2.1
"""
_KORQUADV2_CITATION = """
김영민, 임승영, 이현정, 박소윤, 김명지. (2020). KorQuAD 2.0: 웹문서 기계독해를 위한 한국어 질의응답 데이터셋. 정보과학회논문지, 47(6), 577-586.
"""


SQUADLIKE_FEATURES = datasets.Features({
    "id": datasets.Value("string"),
    "title": datasets.Value("string"),
    "context": datasets.Value("string"),
    "question": datasets.Value("string"),
    "answers": datasets.Sequence({
        "text": datasets.Value("string"),
        "answer_start": datasets.Value("int32"),
    }),
})

# adapted from question_answering in tensorflow_datasets
def generate_squadlike_examples(filepath):
    """Parses a SQuAD-like JSON, yielding examples with `SQUADLIKE_FEATURES`."""
    # We first re-group the answers, which may be flattened (e.g., by XTREME).
    qas = {}
    with open(filepath) as f:
        squad = json.load(f)
        for article in squad["data"]:
            title = article.get("title", "")
            for paragraph in article["paragraphs"]:
                context = paragraph["context"]
                for qa in paragraph["qas"]:
                    qa["title"] = title
                    qa["context"] = context
                    id_ = qa["id"]
                    if id_ in qas:
                        qas[id_]["answers"].extend(qa["answers"])
                    else:
                        qas[id_] = qa

    for id_, qa in qas.items():
        answer_starts = [answer["answer_start"] for answer in qa["answers"]]
        answers = [answer["text"] for answer in qa["answers"]]
        yield id_, {
            "title": qa["title"],
            "context": qa["context"],
            "question": qa["question"],
            "id": id_,
            "answers": {
                "answer_start": answer_starts,
                "text": answers,
            },
        }

_KORQUADV2_KEY_MAP = {'context': 'context', 'answer_start': 'answer_start', 'text': 'text'}
_KORQUADV2_HTML_KEY_MAP = {'context': 'raw_html', 'answer_start': 'html_answer_start', 'text': 'html_answer_text'}

def generate_korquadv2_examples(filepath, KEY_MAP):
    """Parses a KorQuAD 2.x JSON file, reading the fields named by KEY_MAP."""
    qas = {}
    with open(filepath) as f:
        squad = json.load(f)
        for article in squad["data"]:
            title = article.get("title", "").strip()
            context = article[KEY_MAP['context']]
            for qa in article["qas"]:
                qa["title"] = title
                qa["context"] = context
                id_ = qa["id"]
                qa["answers"] = [copy.deepcopy(qa["answer"])]
                del qa["answer"]
                qas[id_] = qa

    for id_, qa in qas.items():
        answer_starts = [answer[KEY_MAP['answer_start']] for answer in qa["answers"]]
        answers = [answer[KEY_MAP['text']] for answer in qa["answers"]]
        yield id_, {
            "title": qa["title"],
            "context": qa["context"],
            "question": qa["question"].strip(),
            "id": id_,
            "answers": {
                "answer_start": answer_starts,
                "text": answers,
            },
        }

_KORQUAD_MANUAL_SPLIT = {
    'source': {
        datasets.Split.TRAIN: ['train'],
        datasets.Split.VALIDATION: ['train'],
        datasets.Split.TEST: ['dev'],
    },
    'split': {
        datasets.Split.TRAIN: lambda x: x % 10 != 0,
        datasets.Split.VALIDATION: lambda x: x % 10 == 0,
        datasets.Split.TEST: lambda x: True,
    }}

def _update_split(file_dict, split_dict):
    source_dict = split_dict['source']
    return_dict = {}
    for k, v in source_dict.items():
        flist = []
        for vv in v:
            flist.extend(file_dict[vv] if isinstance(file_dict[vv], list) else [file_dict[vv]])
        return_dict[k] = flist
    return return_dict

def _hash_text(text):
    return hashlib.md5(text.encode("utf-8")).hexdigest()

def _filter_fn_hash_id(uid, split_fn):
    hash_id = _hash_text(str(uid))
    val = int(hash_id, 16)
    return split_fn(val)
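
# How the ".split" configs divide the data: the original train files feed both
# TRAIN and VALIDATION, while the official dev files become TEST. Each example
# id is MD5-hashed and read as an integer; ids whose value modulo 10 is nonzero
# (~90%) go to TRAIN, and ids with remainder 0 (~10%) go to VALIDATION.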

_VERSION = datasets.Version('1.0.0', "")

class KorquadConfig(datasets.BuilderConfig):
    def __init__(self,
                 name,
                 data_url,
                 description,
                 citation,
                 manual_split=None,
                 **kwargs):
        super(KorquadConfig, self).__init__(
            name=name,
            version=_VERSION,
            **kwargs
        )
        self.data_url = data_url
        self.description = description
        self.citation = citation
        self.manual_split = manual_split

class Korquad(datasets.GeneratorBasedBuilder):
    """DatasetBuilder for korquad dataset."""
    RELEASE_NOTES = {
        '1.0.0': 'Initial release.',
    }

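    # Six configs are exposed: "v1.0"/"v2.1" mirror the official train/dev
    # files, the ".split" variants additionally derive a held-out validation
    # set via _KORQUAD_MANUAL_SPLIT, and the "v2.1.html" variants keep the raw
    # HTML context and HTML answer offsets instead of the plain-text fields.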
    BUILDER_CONFIGS = [
        KorquadConfig(
            'v1.0',
            data_url=_KORQUADV1_DEFAULT_SPLIT,
            description=_KORQUADV1_DESCRIPTION,
            citation=_KORQUADV1_CITATION,
        ),
        KorquadConfig(
            'v1.0.split',
            data_url=_KORQUADV1_DEFAULT_SPLIT,
            description=_KORQUADV1_DESCRIPTION,
            citation=_KORQUADV1_CITATION,
            manual_split=_KORQUAD_MANUAL_SPLIT,
        ),
        KorquadConfig(
            'v2.1',
            data_url=_KORQUADV2_DEFAULT_SPLIT,
            description=_KORQUADV2_DESCRIPTION,
            citation=_KORQUADV2_CITATION,
        ),
        KorquadConfig(
            'v2.1.split',
            data_url=_KORQUADV2_DEFAULT_SPLIT,
            description=_KORQUADV2_DESCRIPTION,
            citation=_KORQUADV2_CITATION,
            manual_split=_KORQUAD_MANUAL_SPLIT,
        ),
        KorquadConfig(
            'v2.1.html',
            data_url=_KORQUADV2_DEFAULT_SPLIT,
            description=_KORQUADV2_DESCRIPTION,
            citation=_KORQUADV2_CITATION,
        ),
        KorquadConfig(
            'v2.1.html.split',
            data_url=_KORQUADV2_DEFAULT_SPLIT,
            description=_KORQUADV2_DESCRIPTION,
            citation=_KORQUADV2_CITATION,
            manual_split=_KORQUAD_MANUAL_SPLIT,
        ),
    ]

    def _info(self) -> datasets.DatasetInfo:
        """Returns the dataset metadata."""
        features_dict = SQUADLIKE_FEATURES

        return datasets.DatasetInfo(
            description=self.config.description,
            features=features_dict,
            homepage=_KORQUAD_URL,
            citation=self.config.citation,
        )

    def _split_generators(self, dl_manager: datasets.DownloadManager):
        """Returns SplitGenerators."""

        path_kv = {k: dl_manager.download_and_extract(v) for k, v in self.config.data_url.items()}
        if not self.config.name.startswith("v1.0"):
            # KorQuAD 2.1 is distributed as zip archives; collect the extracted JSON files.
            for k, v in path_kv.items():
                file_names = []
                for vv in v:
                    file_names.extend(glob.glob(os.path.join(vv, "*.json")))
                path_kv[k] = file_names

        if self.config.manual_split is not None:
            path_kv = _update_split(path_kv, self.config.manual_split)
            split_fn = self.config.manual_split['split']
            #return {k:self._generate_examples(v, split_fn[k]) for k, v in path_kv.items()}
            return [datasets.SplitGenerator(name=k, gen_kwargs={'path_list': v, 'split_fn': split_fn[k]}) for k, v in path_kv.items()]

        # TODO(korquad): Returns the Dict[split names, Iterator[Key, Example]]
        #return {k:self._generate_examples(v) for k, v in path_kv.items()}
        return [datasets.SplitGenerator(name=k, gen_kwargs={'path_list': v}) for k, v in path_kv.items()]

    def _generate_examples(self, path_list, split_fn=None):
        """Yields examples."""
        # TODO(korquad): Yields (key, example) tuples from the dataset
        if self.config.name.startswith("v2.1.html"):
            gen_fn = functools.partial(generate_korquadv2_examples, KEY_MAP=_KORQUADV2_HTML_KEY_MAP)
        elif self.config.name.startswith("v2.1"):
            gen_fn = functools.partial(generate_korquadv2_examples, KEY_MAP=_KORQUADV2_KEY_MAP)
        else:
            gen_fn = generate_squadlike_examples

        if split_fn is not None:
            split_filter = functools.partial(_filter_fn_hash_id, split_fn=split_fn)
        else:
            split_filter = lambda x: True

        _hash_set = set()

        for fpath in path_list:
            for example in iter(gen_fn(fpath)):
                uid, _ = example
                if split_filter(str(uid)) and str(uid) not in _hash_set:
                    _hash_set.add(str(uid))
                    yield example

# tfds build --data_dir ../../tmp/tensorflow_datasets --config v1.0.split
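
A script like this is normally consumed through the `datasets` loading API rather than `tfds build`. The snippet below is an illustrative sketch only, assuming the file is saved locally as korquad.py and a `datasets` version that still supports loading scripts; the config names come from BUILDER_CONFIGS above.

import datasets

# KorQuAD 1.0 with the official files exposed as the "train" and "dev" splits.
korquad_v1 = datasets.load_dataset("korquad.py", "v1.0")
print(korquad_v1["train"][0]["question"])

# A ".split" config instead yields train/validation/test, with validation
# carved out of the official train files by the hash-based split above.
korquad_v1_split = datasets.load_dataset("korquad.py", "v1.0.split")
print({name: ds.num_rows for name, ds in korquad_v1_split.items()})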