luozhouyang committed on
Commit c3386a7
1 Parent(s): 33297f1

Fixed generate examples

Files changed (2)
  1. dataset_infos.json +1 -0
  2. dureader.py +36 -11
dataset_infos.json ADDED
@@ -0,0 +1 @@
+ {"robust": {"description": "", "citation": "", "homepage": "", "license": "", "features": {"id": {"dtype": "string", "id": null, "_type": "Value"}, "context": {"dtype": "string", "id": null, "_type": "Value"}, "question": {"dtype": "string", "id": null, "_type": "Value"}, "answers": {"feature": {"text": {"dtype": "string", "id": null, "_type": "Value"}, "answer_start": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "task_templates": null, "builder_name": "du_reader", "config_name": "robust", "version": {"version_str": "1.0.0", "description": "", "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 12509537, "num_examples": 14520, "dataset_name": "du_reader"}, "validation": {"name": "validation", "num_bytes": 1228154, "num_examples": 1417, "dataset_name": "du_reader"}, "test": {"name": "test", "num_bytes": 46912731, "num_examples": 50000, "dataset_name": "du_reader"}}, "download_checksums": {"https://dataset-bj.cdn.bcebos.com/qianyan/dureader_robust-data.tar.gz": {"num_bytes": 20518631, "checksum": "99bed9ced8995df1c89b9789f890c27a13b4650a56b4d973907cc28da8bd9f0f"}}, "download_size": 20518631, "post_processing_size": null, "dataset_size": 60650422, "size_in_bytes": 81169053}, "checklist": {"description": "", "citation": "", "homepage": "", "license": "", "features": {"id": {"dtype": "string", "id": null, "_type": "Value"}, "title": {"dtype": "string", "id": null, "_type": "Value"}, "context": {"dtype": "string", "id": null, "_type": "Value"}, "question": {"dtype": "string", "id": null, "_type": "Value"}, "is_impossible": {"dtype": "string", "id": null, "_type": "Value"}, "answers": {"feature": {"text": {"dtype": "string", "id": null, "_type": "Value"}, "answer_start": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}, "type": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": null, "task_templates": null, "builder_name": "du_reader", "config_name": "checklist", "version": {"version_str": "1.0.0", "description": "", "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 2459480, "num_examples": 2999, "dataset_name": "du_reader"}, "validation": {"name": "validation", "num_bytes": 879626, "num_examples": 1130, "dataset_name": "du_reader"}, "test": {"name": "test", "num_bytes": 32421655, "num_examples": 49992, "dataset_name": "du_reader"}}, "download_checksums": {"https://dataset-bj.cdn.bcebos.com/qianyan/dureader_checklist-data.tar.gz": {"num_bytes": 18319191, "checksum": "223c370696b9f1e8c89d84b7935c8da88128004788ad80fb9c3830130461e6f3"}}, "download_size": 18319191, "post_processing_size": null, "dataset_size": 35760761, "size_in_bytes": 54079952}}
dureader.py CHANGED
@@ -56,13 +56,14 @@ class DuReader(datasets.GeneratorBasedBuilder):
                 "title": datasets.Value("string"),
                 "context": datasets.Value("string"),
                 "question": datasets.Value("string"),
-                "is_impossible": datasets.Value("bool"),
+                "is_impossible": datasets.Value("string"),
                 "answers": datasets.Sequence(
                     {
                         "text": datasets.Value("string"),
                         "answer_start": datasets.Value("int32"),
                     }
                 ),
+                "type": datasets.Value("string"),
             }
         return datasets.DatasetInfo(
             description="",
@@ -123,16 +124,18 @@ class DuReader(datasets.GeneratorBasedBuilder):
     def _generate_examples(self, data_file, split):
         if self.config.name == "robust":
             if split == "train" or split == "dev":
+                print("Processing split: ", split)
                 return self._generate_robust_examples(data_file)
+            print("Processing split: ", split)
             return self._generate_robust_test_examples(data_file)
 
         if self.config.name == "checklist":
             if split == "train" or split == "dev":
+                print("Processing split: ", split)
                 return self._generate_checklist_examples(data_file)
+            print("Processing split: ", split)
             return self._generate_checklist_test_examples(data_file)
 
-        return None, None
-
     def _generate_robust_examples(self, data_file):
         with open(data_file, mode="rt", encoding="utf-8") as fin:
             data = json.load(fin)["data"]
@@ -160,49 +163,71 @@ class DuReader(datasets.GeneratorBasedBuilder):
             for p in d["paragraphs"]:
                 context = p["context"]
                 for qa in p["qas"]:
+                    qid = qa["id"]
                     example = {
-                        "id": qa["id"],
+                        "id": qid,
                         "context": context,
                         "question": qa["question"],
+                        "answers": {
+                            "text": [],
+                            "answer_start": [],
+                        },
                     }
                     yield example["id"], example
 
     def _generate_checklist_examples(self, data_file):
         with open(data_file, mode="rt", encoding="utf-8") as fin:
             data = json.load(fin)["data"]
+        exist_ids = set()
         for d in data:
             for p in d["paragraphs"]:
-                title = p["title"]
-                context = p["context"]
+                title = p["title"].strip()
+                context = p["context"].strip()
                 for qa in p["qas"]:
+                    qid = qa["id"]
+                    # skip duplicate keys
+                    if qid in exist_ids:
+                        continue
+                    exist_ids.add(qid)
                     starts = [x["answer_start"] for x in qa["answers"]]
-                    answers = [x["text"] for x in qa["answers"]]
+                    answers = [x["text"].strip() for x in qa["answers"]]
                     example = {
-                        "id": qa["id"],
+                        "id": qid,
                         "title": title,
                         "context": context,
-                        "question": qa["question"],
+                        "question": qa["question"].strip(),
                         "is_impossible": qa["is_impossible"],
                         "answers": {
                             "text": answers,
                             "answer_start": starts,
                         },
-                        "type": qa["type"],
+                        "type": qa["type"].strip(),
                     }
                     yield example["id"], example
 
     def _generate_checklist_test_examples(self, data_file):
         with open(data_file, mode="rt", encoding="utf-8") as fin:
             data = json.load(fin)["data"]
+        exist_ids = set()
        for d in data:
             for p in d["paragraphs"]:
                 title = p["title"]
                 context = p["context"]
                 for qa in p["qas"]:
+                    qid = qa["id"]
+                    if qid in exist_ids:
+                        continue
+                    exist_ids.add(qid)
                     example = {
-                        "id": qa["id"],
+                        "id": qid,
                         "title": title,
                         "context": context,
                         "question": qa["question"],
+                        "is_impossible": None,
+                        "answers": {
+                            "text": [],
+                            "answer_start": [],
+                        },
+                        "type": None,
                     }
                     yield example["id"], example
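The substance of the fix: every yielded example has to carry the keys declared in the builder's features, so the test-split generators now fill "answers" with empty lists (and, for checklist, "is_impossible" and "type" with None) instead of omitting them. A minimal sketch of the constraint using the datasets Features API; the feature dict mirrors the robust config above, and the example values are made up:

    import datasets

    features = datasets.Features(
        {
            "id": datasets.Value("string"),
            "context": datasets.Value("string"),
            "question": datasets.Value("string"),
            "answers": datasets.Sequence(
                {
                    "text": datasets.Value("string"),
                    "answer_start": datasets.Value("int32"),
                }
            ),
        }
    )

    # Test-split examples carry no labels, so the patched generators yield
    # empty answer lists rather than dropping the "answers" key; an example
    # missing a declared key would typically fail when the builder encodes it.
    test_example = {
        "id": "q-0001",
        "context": "...",
        "question": "...",
        "answers": {"text": [], "answer_start": []},
    }
    encoded = features.encode_example(test_example)  # accepts the empty lists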