Commit e67d674 (1 parent: 8123a62)

Fix DuplicatedKeysError on msr_sqa dataset (#3771)

* Make keys unique
* Fix dataset card

Commit from https://github.com/huggingface/datasets/commit/3538e4e5ad595225c854e59d10736876da4a6d45

Files changed:
- README.md: +182 -5
- msr_sqa.py: +2 -4
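
For context: `datasets` requires every example yielded by a script's `_generate_examples` to carry a key that is unique within its split, and raises `DuplicatedKeysError` otherwise. In SQA the `id` field (e.g. `nt-639`) repeats across examples, since each question sequence appears at several positions and is decomposed by several annotators, so keying on `id` trips that check. A minimal sketch of the failure mode and of the fix, using hypothetical rows in place of the real TSV:

```python
# Hypothetical rows standing in for the TSV that msr_sqa.py reads
# with csv.DictReader (see the diff below).
rows = [
    {"id": "nt-639", "annotator": 0, "position": 0},
    {"id": "nt-639", "annotator": 0, "position": 1},  # same id, next question
    {"id": "nt-639", "annotator": 1, "position": 0},  # same id, new annotator
]

# Old keying: the id repeats, which datasets rejects with DuplicatedKeysError.
old_keys = [row["id"] for row in rows]
assert len(set(old_keys)) < len(old_keys)  # duplicates present

# New keying: the running row index from enumerate() is unique by construction.
new_keys = [idx for idx, _ in enumerate(rows)]
assert len(set(new_keys)) == len(new_keys)
```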
README.md CHANGED

````diff
@@ -73,13 +73,170 @@ We created SQA by asking crowdsourced workers to decompose 2,022 questions from
 
 ### Languages
 
-English
+English (`en`).
 
 ## Dataset Structure
 
 ### Data Instances
 
-[More Information Needed]
+```
+{'id': 'nt-639',
+ 'annotator': 0,
+ 'position': 0,
+ 'question': 'where are the players from?',
+ 'table_file': 'table_csv/203_149.csv',
+ 'table_header': ['Pick', 'Player', 'Team', 'Position', 'School'],
+ 'table_data': [['1',
+   'Ben McDonald',
+   'Baltimore Orioles',
+   'RHP',
+   'Louisiana State University'],
+  ['2',
+   'Tyler Houston',
+   'Atlanta Braves',
+   'C',
+   '"Valley HS (Las Vegas',
+   ' NV)"'],
+  ['3', 'Roger Salkeld', 'Seattle Mariners', 'RHP', 'Saugus (CA) HS'],
+  ['4',
+   'Jeff Jackson',
+   'Philadelphia Phillies',
+   'OF',
+   '"Simeon HS (Chicago',
+   ' IL)"'],
+  ['5', 'Donald Harris', 'Texas Rangers', 'OF', 'Texas Tech University'],
+  ['6', 'Paul Coleman', 'Saint Louis Cardinals', 'OF', 'Frankston (TX) HS'],
+  ['7', 'Frank Thomas', 'Chicago White Sox', '1B', 'Auburn University'],
+  ['8', 'Earl Cunningham', 'Chicago Cubs', 'OF', 'Lancaster (SC) HS'],
+  ['9',
+   'Kyle Abbott',
+   'California Angels',
+   'LHP',
+   'Long Beach State University'],
+  ['10',
+   'Charles Johnson',
+   'Montreal Expos',
+   'C',
+   '"Westwood HS (Fort Pierce',
+   ' FL)"'],
+  ['11',
+   'Calvin Murray',
+   'Cleveland Indians',
+   '3B',
+   '"W.T. White High School (Dallas',
+   ' TX)"'],
+  ['12', 'Jeff Juden', 'Houston Astros', 'RHP', 'Salem (MA) HS'],
+  ['13', 'Brent Mayne', 'Kansas City Royals', 'C', 'Cal State Fullerton'],
+  ['14',
+   'Steve Hosey',
+   'San Francisco Giants',
+   'OF',
+   'Fresno State University'],
+  ['15',
+   'Kiki Jones',
+   'Los Angeles Dodgers',
+   'RHP',
+   '"Hillsborough HS (Tampa',
+   ' FL)"'],
+  ['16', 'Greg Blosser', 'Boston Red Sox', 'OF', 'Sarasota (FL) HS'],
+  ['17', 'Cal Eldred', 'Milwaukee Brewers', 'RHP', 'University of Iowa'],
+  ['18',
+   'Willie Greene',
+   'Pittsburgh Pirates',
+   'SS',
+   '"Jones County HS (Gray',
+   ' GA)"'],
+  ['19', 'Eddie Zosky', 'Toronto Blue Jays', 'SS', 'Fresno State University'],
+  ['20', 'Scott Bryant', 'Cincinnati Reds', 'OF', 'University of Texas'],
+  ['21', 'Greg Gohr', 'Detroit Tigers', 'RHP', 'Santa Clara University'],
+  ['22',
+   'Tom Goodwin',
+   'Los Angeles Dodgers',
+   'OF',
+   'Fresno State University'],
+  ['23', 'Mo Vaughn', 'Boston Red Sox', '1B', 'Seton Hall University'],
+  ['24', 'Alan Zinter', 'New York Mets', 'C', 'University of Arizona'],
+  ['25', 'Chuck Knoblauch', 'Minnesota Twins', '2B', 'Texas A&M University'],
+  ['26', 'Scott Burrell', 'Seattle Mariners', 'RHP', 'Hamden (CT) HS']],
+ 'answer_coordinates': {'row_index': [0,
+   1,
+   2,
+   3,
+   4,
+   5,
+   6,
+   7,
+   8,
+   9,
+   10,
+   11,
+   12,
+   13,
+   14,
+   15,
+   16,
+   17,
+   18,
+   19,
+   20,
+   21,
+   22,
+   23,
+   24,
+   25],
+  'column_index': [4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4,
+   4]},
+ 'answer_text': ['Louisiana State University',
+  'Valley HS (Las Vegas, NV)',
+  'Saugus (CA) HS',
+  'Simeon HS (Chicago, IL)',
+  'Texas Tech University',
+  'Frankston (TX) HS',
+  'Auburn University',
+  'Lancaster (SC) HS',
+  'Long Beach State University',
+  'Westwood HS (Fort Pierce, FL)',
+  'W.T. White High School (Dallas, TX)',
+  'Salem (MA) HS',
+  'Cal State Fullerton',
+  'Fresno State University',
+  'Hillsborough HS (Tampa, FL)',
+  'Sarasota (FL) HS',
+  'University of Iowa',
+  'Jones County HS (Gray, GA)',
+  'Fresno State University',
+  'University of Texas',
+  'Santa Clara University',
+  'Fresno State University',
+  'Seton Hall University',
+  'University of Arizona',
+  'Texas A&M University',
+  'Hamden (CT) HS']}
+```
 
 ### Data Fields
 
@@ -100,7 +257,11 @@ It is recommended to use a CSV parser like the Python CSV package to process the
 
 ### Data Splits
 
-[More Information Needed]
+
+|             | train | test |
+|-------------|------:|-----:|
+| N. examples | 14541 | 3012 |
+
 
 ## Dataset Creation
 
@@ -154,11 +315,27 @@ It is recommended to use a CSV parser like the Python CSV package to process the
 
 ### Licensing Information
 
-[More Information Needed]
+[Microsoft Research Data License Agreement](https://msropendata-web-api.azurewebsites.net/licenses/2f933be3-284d-500b-7ea3-2aa2fd0f1bb2/view).
 
 ### Citation Information
 
-[More Information Needed]
+```
+@inproceedings{iyyer-etal-2017-search,
+    title = "Search-based Neural Structured Learning for Sequential Question Answering",
+    author = "Iyyer, Mohit and
+      Yih, Wen-tau and
+      Chang, Ming-Wei",
+    booktitle = "Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
+    month = jul,
+    year = "2017",
+    address = "Vancouver, Canada",
+    publisher = "Association for Computational Linguistics",
+    url = "https://aclanthology.org/P17-1167",
+    doi = "10.18653/v1/P17-1167",
+    pages = "1821--1831",
+}
+
+```
 
 ### Contributions
 
````
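The updated card can be sanity-checked against the dataset itself; a quick sketch, assuming a working `datasets` installation and that the script can download the SQA files:

```python
from datasets import load_dataset

# Load both splits declared in the card's split table.
dataset = load_dataset("msr_sqa")

print(dataset)               # expect train: 14541 examples, test: 3012
print(dataset["train"][0])   # an example shaped like the dict in the card
```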
msr_sqa.py CHANGED

```diff
@@ -153,14 +153,12 @@ class MsrSQA(datasets.GeneratorBasedBuilder):
 
     def _generate_examples(self, filepath, data_dir):
         """Yields examples."""
-
        with open(filepath, encoding="utf-8") as f:
             reader = csv.DictReader(f, delimiter="\t")
-            for row in reader:
-                item = dict(row)
+            for idx, item in enumerate(reader):
                 item["answer_text"] = _parse_answer_text(item["answer_text"])
                 item["answer_coordinates"] = _parse_answer_coordinates(item["answer_coordinates"])
                 header, table_data = _load_table_data(os.path.join(data_dir, item["table_file"]))
                 item["table_header"] = header
                 item["table_data"] = table_data
-                yield item["id"], item
+                yield idx, item
```
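
A note on the design choice: the key in the `(key, example)` pair yielded by `_generate_examples` is only used by the library for bookkeeping such as duplicate detection (the source of the error this commit fixes); it just has to be unique and deterministic within a split. The running row index from `enumerate` satisfies both, and the repeating `id` remains available as an ordinary field on each example.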