Datasets: wiki_asp
Commit d92d7ab (0 parents)
Update files from the datasets library (from 1.2.0)
Release notes: https://github.com/huggingface/datasets/releases/tag/1.2.0
- .gitattributes +27 -0
- README.md +167 -0
- dataset_infos.json +1 -0
- dummy/album/1.1.0/dummy_data.zip +3 -0
- dummy/animal/1.1.0/dummy_data.zip +3 -0
- dummy/artist/1.1.0/dummy_data.zip +3 -0
- dummy/building/1.1.0/dummy_data.zip +3 -0
- dummy/company/1.1.0/dummy_data.zip +3 -0
- dummy/educational_institution/1.1.0/dummy_data.zip +3 -0
- dummy/event/1.1.0/dummy_data.zip +3 -0
- dummy/film/1.1.0/dummy_data.zip +3 -0
- dummy/group/1.1.0/dummy_data.zip +3 -0
- dummy/historic_place/1.1.0/dummy_data.zip +3 -0
- dummy/infrastructure/1.1.0/dummy_data.zip +3 -0
- dummy/mean_of_transportation/1.1.0/dummy_data.zip +3 -0
- dummy/office_holder/1.1.0/dummy_data.zip +3 -0
- dummy/plant/1.1.0/dummy_data.zip +3 -0
- dummy/single/1.1.0/dummy_data.zip +3 -0
- dummy/soccer_player/1.1.0/dummy_data.zip +3 -0
- dummy/software/1.1.0/dummy_data.zip +3 -0
- dummy/television_show/1.1.0/dummy_data.zip +3 -0
- dummy/town/1.1.0/dummy_data.zip +3 -0
- dummy/written_work/1.1.0/dummy_data.zip +3 -0
- wiki_asp.py +208 -0
.gitattributes
ADDED
@@ -0,0 +1,27 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bin.* filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zstandard filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
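
Every pattern above routes matching files through Git LFS. A rough Python approximation of the matching (illustrative only; the actual matching is done by git itself, whose gitattributes glob semantics differ slightly from `fnmatch`):

```python
import fnmatch

# Excerpt of the LFS patterns from the .gitattributes above.
LFS_PATTERNS = ["*.7z", "*.arrow", "*.bin", "*.bz2", "*.parquet", "*.zip", "*tfevents*"]

def is_lfs_tracked(filename: str) -> bool:
    """Rough check: would this basename match one of the LFS rules?"""
    return any(fnmatch.fnmatch(filename, pattern) for pattern in LFS_PATTERNS)

print(is_lfs_tracked("dummy_data.zip"))  # True  -> stored as an LFS pointer
print(is_lfs_tracked("wiki_asp.py"))     # False -> stored as a normal blob
```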
README.md
ADDED
@@ -0,0 +1,167 @@
---
annotations_creators:
- crowdsourced
language_creators:
- crowdsourced
languages:
- en
licenses:
- cc-by-sa-4-0
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- conditional-text-generation
task_ids:
- summarization
---

# Dataset Card for WikiAsp

## Table of Contents

- [Dataset Description](#dataset-description)
  - [Dataset Summary](#dataset-summary)
  - [Supported Tasks](#supported-tasks-and-leaderboards)
  - [Languages](#languages)
- [Dataset Structure](#dataset-structure)
  - [Data Instances](#data-instances)
  - [Data Fields](#data-fields)
  - [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
  - [Curation Rationale](#curation-rationale)
  - [Source Data](#source-data)
  - [Annotations](#annotations)
  - [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
  - [Social Impact of Dataset](#social-impact-of-dataset)
  - [Discussion of Biases](#discussion-of-biases)
  - [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
  - [Dataset Curators](#dataset-curators)
  - [Licensing Information](#licensing-information)
  - [Citation Information](#citation-information)

## Dataset Description

- **Homepage:** [Wiki Asp](https://github.com/neulab/wikiasp)
- **Repository:** [GitHub](https://github.com/neulab/wikiasp)
- **Paper:** [WikiAsp: A Dataset for Multi-domain Aspect-based Summarization](https://arxiv.org/abs/2011.07832)

### Dataset Summary

WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic domain: models are asked to summarize the cited reference documents of a Wikipedia article into aspect-based summaries. Each of the 20 domains includes 10 domain-specific pre-defined aspects.

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

The dataset is in English (`en`).

## Dataset Structure

### Data Instances

An example from the "plant" configuration:

```
{
    'exid': 'train-78-8',
    'inputs': ['< EOT > calcareous rocks and barrens , wooded cliff edges .',
        'plant an erect short - lived perennial ( or biennial ) herb whose slender leafy stems radiate from the base , and are 3 - 5 dm tall , giving it a bushy appearance .',
        'leaves densely hairy , grayish - green , simple and alternate on the stem .',
        'flowers are bright yellow to yellow - orange , cross - shaped , each having 4 spatula - shaped petals about 5 mm long .',
        'fruit is a nearly globe - shaped capsule , about 3 mm in diameter , with 1 or 2 seeds in each cell .',
        'flowering period : early april to late may .',
        'even though there are many members of the mustard family in the range of this species , no other plant shares this combination of characters : bright yellow flowers , grayish - green stems and foliage , globe - shaped fruits with a long style , perennial habit , and the habitat of limestone rocky cliffs .',
        'timber removal may be beneficial and even needed to maintain the open character of the habitat for this species .',
        'hand removal of trees in the vicinity of the population is necessary to avoid impacts from timber operations .',
        'southwest indiana , north central kentucky , and north central tennessee .',
        'email : naturepreserves @ ky . gov feedback naturepreserves @ ky . gov | about the agency | about this site copyright © 2003 - 2013 commonwealth of kentucky .',
        'all rights reserved .',
        '<EOS>'
    ],
    'targets': [
        ['description',
         'physaria globosa is a small plant covered with dense hairs giving it a grayish appearance . it produces yellow flowers in the spring , and its fruit is globe - shaped . its preferred habitat is dry limestone cliffs , barrens , cedar glades , steep wooded slopes , and talus areas . some have also been found in areas of deeper soil and roadsides .'],
        ['conservation',
         'the population fluctuates year to year , but on average there are about 2000 living plants at any one time , divided among 33 known locations . threats include forms of habitat degradation and destruction , including road construction and grading , mowing , dumping , herbicides , alteration of waterways , livestock damage , and invasive species of plants such as japanese honeysuckle , garlic mustard , alsike clover , sweet clover , meadow fescue , and multiflora rose . all populations are considered vulnerable to extirpation .']
    ]
}
```

### Data Fields

- `exid`: a unique identifier
- `inputs`: the cited reference documents, given as sentences tokenized with NLTK
- `targets`: a list of aspect-based summaries, where each element is a pair of a) the target aspect and b) the aspect-based summary
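
A minimal loading sketch with the `datasets` library (using the "plant" configuration shown above; field accesses follow the schema just listed):

```python
from datasets import load_dataset

# Load the "plant" domain configuration of WikiAsp.
plant = load_dataset("wiki_asp", "plant")

example = plant["train"][0]
print(example["exid"])        # unique identifier, e.g. 'train-78-8'
print(example["inputs"][0])   # first tokenized sentence of the cited references
for aspect, summary in example["targets"]:
    # each element of `targets` is a pair: [aspect, aspect-based summary]
    print(aspect, "->", summary[:60])
```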
### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

[More Information Needed]

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

[More Information Needed]

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

The dataset is licensed under CC BY-SA 4.0 (per the `_LICENSE` field in the dataset script).

### Citation Information
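
As given in the dataset script's `_CITATION` entry:

```bibtex
@article{hayashi20tacl,
  title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},
  authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},
  journal = {Transactions of the Association for Computational Linguistics (TACL)},
  year = {2020},
  url = {https://arxiv.org/abs/2011.07832}
}
```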
dataset_infos.json
ADDED
@@ -0,0 +1 @@
{"album": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "album", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1907323642, "num_examples": 24434, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 232999001, "num_examples": 3038, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 234990092, "num_examples": 3104, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Album.tar.bz2": {"num_bytes": 644173065, "checksum": "ec3076e3979438ca331d38f7fbcd1c00fc4149e7ace9002ea848e1c56c57abec"}}, "download_size": 644173065, "post_processing_size": null, "dataset_size": 2375312735, "size_in_bytes": 3019485800}, "animal": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "animal", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 497474133, "num_examples": 16540, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 61315970, "num_examples": 2007, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 57943532, "num_examples": 2005, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Animal.tar.bz2": {"num_bytes": 150974930, "checksum": "8e7b444e34c3e7be3e4eb10fcfb0d0f06e92f50cfb91a18f9680e5fb5c9f03fb"}}, "download_size": 150974930, "post_processing_size": null, "dataset_size": 616733635, "size_in_bytes": 767708565}, "artist": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "artist", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1876134255, "num_examples": 26754, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 237751553, "num_examples": 3329, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 223240910, "num_examples": 3194, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Artist.tar.bz2": {"num_bytes": 626686303, "checksum": "33a4e6402062a115f85e0ce41388c2fdb5e2beee07ca28cdde2cdcfb68aff2a7"}}, "download_size": 626686303, "post_processing_size": null, "dataset_size": 2337126718, "size_in_bytes": 2963813021}, "building": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "building", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1100057273, "num_examples": 20449, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 134357678, "num_examples": 2482, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 139387376, "num_examples": 2607, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Building.tar.bz2": {"num_bytes": 346224042, "checksum": "31a008d22a7853cf4c1a1b11dee8247ff554b148316d1f955a1c0585dbe61f3f"}}, "download_size": 346224042, "post_processing_size": null, "dataset_size": 1373802327, "size_in_bytes": 1720026369}, "company": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "company", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1606057076, "num_examples": 24353, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 199282041, "num_examples": 3029, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 200498778, "num_examples": 2946, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Company.tar.bz2": {"num_bytes": 504194353, "checksum": "59214a695e62b7802bfe2e6ce11e428a66c0e2ad7397a486d4dffef0ac1c5cc3"}}, "download_size": 504194353, "post_processing_size": null, "dataset_size": 2005837895, "size_in_bytes": 2510032248}, "educational_institution": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "educational_institution", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1623000534, "num_examples": 17634, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 200476681, "num_examples": 2267, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 203262430, "num_examples": 2141, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/EducationalInstitution.tar.bz2": {"num_bytes": 471033992, "checksum": "603befa19fce03cde1027db4373693d77318dd7eae54a432867a2c7f06d993de"}}, "download_size": 471033992, "post_processing_size": null, "dataset_size": 2026739645, "size_in_bytes": 2497773637}, "event": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "event", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 748201660, "num_examples": 6475, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 96212295, "num_examples": 828, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 97431395, "num_examples": 807, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Event.tar.bz2": {"num_bytes": 240072903, "checksum": "f90cee3b02e3e542cf9b08a37b7af1918467dd0e9bff080ffbc3fefc7bc7a672"}}, "download_size": 240072903, "post_processing_size": null, "dataset_size": 941845350, "size_in_bytes": 1181918253}, "film": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "film", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 2370068027, "num_examples": 32129, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 294918370, "num_examples": 3981, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 290240851, "num_examples": 4014, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Film.tar.bz2": {"num_bytes": 808231638, "checksum": "4e849cc085540119ea61f67ad581795fca8dafaa8f6e8c51a5e7af314aa42c0c"}}, "download_size": 808231638, "post_processing_size": null, "dataset_size": 2955227248, "size_in_bytes": 3763458886}, "group": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "group", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1025166800, "num_examples": 11966, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 114239405, "num_examples": 1444, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 120863870, "num_examples": 1462, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Group.tar.bz2": {"num_bytes": 344498865, "checksum": "1269d3bbcdf3b106f2600f0e923ac34eb208307c183184f240243bd83f9ba094"}}, "download_size": 344498865, "post_processing_size": null, "dataset_size": 1260270075, "size_in_bytes": 1604768940}, "historic_place": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "historic_place", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 256158020, "num_examples": 4919, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 31201154, "num_examples": 600, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 29058067, "num_examples": 601, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/HistoricPlace.tar.bz2": {"num_bytes": 77289509, "checksum": "257aa0633f810d1d551993eca288e27c8dfccd20020199d897556b4be2ed8e7a"}}, "download_size": 77289509, "post_processing_size": null, "dataset_size": 316417241, "size_in_bytes": 393706750}, "infrastructure": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "infrastructure", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1124486451, "num_examples": 17226, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 134820330, "num_examples": 2091, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 125193140, "num_examples": 1984, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Infrastructure.tar.bz2": {"num_bytes": 328804337, "checksum": "a9ef52b3216fa69e9d1c24353c87f5702f3ea06c97c0b1420add22d2686c02e4"}}, "download_size": 328804337, "post_processing_size": null, "dataset_size": 1384499921, "size_in_bytes": 1713304258}, "mean_of_transportation": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "mean_of_transportation", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 650424738, "num_examples": 9277, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 89759392, "num_examples": 1170, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 88440901, "num_examples": 1215, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/MeanOfTransportation.tar.bz2": {"num_bytes": 210234418, "checksum": "a8a8e42bca6e0db465343f659724aedc8125b47c6291886a4df97a3896f969b4"}}, "download_size": 210234418, "post_processing_size": null, "dataset_size": 828625031, "size_in_bytes": 1038859449}, "office_holder": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "office_holder", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1643899203, "num_examples": 18177, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 207433317, "num_examples": 2333, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 202624275, "num_examples": 2218, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/OfficeHolder.tar.bz2": {"num_bytes": 524721727, "checksum": "608a67ae3330e303d11a38067506a46d66f314f6390d985870db1a36fa5cf253"}}, "download_size": 524721727, "post_processing_size": null, "dataset_size": 2053956795, "size_in_bytes": 2578678522}, "plant": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "plant", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 239150885, "num_examples": 6107, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 31340125, "num_examples": 774, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 28752150, "num_examples": 786, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Plant.tar.bz2": {"num_bytes": 77890632, "checksum": "bdcd8c655ec94c33919d30b3bcb8e45f6d6b267af25a61caafd56a9a046a8871"}}, "download_size": 77890632, "post_processing_size": null, "dataset_size": 299243160, "size_in_bytes": 377133792}, "single": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "single", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1277277277, "num_examples": 14217, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 152328537, "num_examples": 1712, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 160312594, "num_examples": 1734, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Single.tar.bz2": {"num_bytes": 429214401, "checksum": "f1c96a5e8e14fc617a15350865a93e674b469434aeb6a439a0b1eb53dc8d18db"}}, "download_size": 429214401, "post_processing_size": null, "dataset_size": 1589918408, "size_in_bytes": 2019132809}, "soccer_player": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "soccer_player", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 604502541, "num_examples": 17599, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 72820378, "num_examples": 2280, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 76705685, "num_examples": 2150, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/SoccerPlayer.tar.bz2": {"num_bytes": 193347234, "checksum": "161be8b7a615a4a837b71f01f5d58274a334bd9bee6e0464ab196956d03d2638"}}, "download_size": 193347234, "post_processing_size": null, "dataset_size": 754028604, "size_in_bytes": 947375838}, "software": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "software", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1122906186, "num_examples": 13516, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 133717992, "num_examples": 1638, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 134578157, "num_examples": 1637, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Software.tar.bz2": {"num_bytes": 356764908, "checksum": "4cd469f23b00592b37002d81eb23a1cabf06bee9e353460aeb9055e693dd4d08"}}, "download_size": 356764908, "post_processing_size": null, "dataset_size": 1391202335, "size_in_bytes": 1747967243}, "television_show": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "television_show", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 893325347, "num_examples": 8717, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 115155155, "num_examples": 1072, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 119461892, "num_examples": 1128, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/TelevisionShow.tar.bz2": {"num_bytes": 302093407, "checksum": "958b264870549be307a4ef0b222d23e45b2670dec7639e5bc150f8f98632cc36"}}, "download_size": 302093407, "post_processing_size": null, "dataset_size": 1127942394, "size_in_bytes": 1430035801}, "town": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "town", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 772504751, "num_examples": 14818, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 100975827, "num_examples": 1831, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 101522638, "num_examples": 1911, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/Town.tar.bz2": {"num_bytes": 243261734, "checksum": "b71d50011d50f7b9131a7cc0f28a0da97c6d2f4421f2b8c7d86e329d85492504"}}, "download_size": 243261734, "post_processing_size": null, "dataset_size": 975003216, "size_in_bytes": 1218264950}, "written_work": {"description": "WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic\ndomain. In this task, models are asked to summarize cited reference documents of a\nWikipedia article into aspect-based summaries. 
Each of the 20 domains include 10\ndomain-specific pre-defined aspects.\n", "citation": "@article{hayashi20tacl,\n title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},\n authors = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},\n journal = {Transactions of the Association for Computational Linguistics (TACL)},\n year = {2020},\n url = {https://arxiv.org/abs/2011.07832}\n}\n", "homepage": "https://github.com/neulab/wikiasp", "license": "CC BY-SA 4.0", "features": {"exid": {"dtype": "string", "id": null, "_type": "Value"}, "inputs": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "targets": {"feature": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "wiki_asp", "config_name": "written_work", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1491395960, "num_examples": 15065, "dataset_name": "wiki_asp"}, "test": {"name": "test", "num_bytes": 189537205, "num_examples": 1931, "dataset_name": "wiki_asp"}, "validation": {"name": "validation", "num_bytes": 185707567, "num_examples": 1843, "dataset_name": "wiki_asp"}}, "download_checksums": {"http://phontron.com/download/wikiasp/WrittenWork.tar.bz2": {"num_bytes": 498307235, "checksum": "77d59d38d1477f90a9719af69be069e22b6261e8c2a0b602c3572a15e9dfcd41"}}, "download_size": 498307235, "post_processing_size": null, "dataset_size": 1866640732, "size_in_bytes": 2364947967}}
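
The `features` entry repeated in each configuration above corresponds to the following schema in the `datasets` library (a sketch reconstructed from the JSON; a `Sequence` with `length: -1` is an unbounded list):

```python
import datasets

# exid: string; inputs: list of sentence strings;
# targets: list of [aspect, summary] string pairs.
features = datasets.Features(
    {
        "exid": datasets.Value("string"),
        "inputs": datasets.Sequence(datasets.Value("string")),
        "targets": datasets.Sequence(datasets.Sequence(datasets.Value("string"))),
    }
)
```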
dummy/album/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7a95301cfc72da6df41dcc0ee94241f7e53f8be1a560014ef8cd614c7e4601b
size 17429
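
Each dummy archive is checked in as a Git LFS pointer file like the one above rather than the zip itself. A small illustrative parser for the three-line pointer format (hypothetical helper, not part of this commit):

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:f7a95301cfc72da6df41dcc0ee94241f7e53f8be1a560014ef8cd614c7e4601b\n"
    "size 17429\n"
)
info = parse_lfs_pointer(pointer)
assert info["size"] == "17429"  # size in bytes of the real archive
```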
dummy/animal/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:117c71f6809cee8ef7f5c19f0640e8ab513d48a1590f963167861a8b670af4db
size 16557
dummy/artist/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6b7ed5ccb4e5e075fdc863d22f6af616e4f3c2a0c8124d76554081243415a593
size 24039
dummy/building/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cb5e62acb1bfc4d8e8ad9425da99c9ba24b8a3b24bfbb7ed7c27e9dc0c6a0a8d
size 25505
dummy/company/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9843729333677506bd2352e6700a609edba3d7bdcab2f2c89aae599fb197c381
size 23495
dummy/educational_institution/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:25ce936ca6773cf3c068d1112254504b07248c5be9ca025c1ee4a8ceb95f2cda
size 23216
dummy/event/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8c129e517b56ab9a9681835dbae79aebaaa1cc6909f2756227e7648fe4a9347a
size 23986
dummy/film/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:da03adecbd5a6602584f73dcc575a10489fc43ff7df980e139bb67fbba2abf6b
size 28579
dummy/group/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:79d490d0bf1ac0cf4bd883947f134eb7168ec585b97801f744dd7edf847551c6
size 21335
dummy/historic_place/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:82d1a8b9c739a5de6c2a15065faca883e4227e60b9c8eb5aada62be14854dde3
size 41101
dummy/infrastructure/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a91ea124b249dbbb4fde43dade9f3e17fda2c199e05e542ae29c8864df9e0a4c
size 20863
dummy/mean_of_transportation/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4b09fd0aeb9d1d2cb73c60c167f78275e1967e2b60c795c4ae815d5806a0c882
size 33093
dummy/office_holder/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e95fbb7928fbbfec77b1bea10386cc8b98f5c616ee3b9c33fe6771d806e0ccac
size 19266
dummy/plant/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0dbacb9ffc2f519a0492366ba599fedd65de9ff92a1ba4d2df3a161d0f05a682
size 14991
dummy/single/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e07bde4339c2c2b8b4c4fecaf29dd8562ded15c6cc79009699927c86652a2967
size 18601
dummy/soccer_player/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:722ada657bf24f53c128e4ba29d1c60043824293766d893a79b18caf67604a6b
size 16250
dummy/software/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:285bdbb01ad0f6585371014d50f8afb362fbefac259e9cccb2234e44c7065014
size 18091
dummy/television_show/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b20a14ec699cb19fac1e174cc5684877dec2a616ca6fcf58da79e9003ea53956
size 20614
dummy/town/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:17ca95f6d2b7c730e15b9aeab1aecd9b738289c62d567bb3ee7c513eac554a97
size 24835
dummy/written_work/1.1.0/dummy_data.zip
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e4c99b4820d19e2953115f4cb0296e106acb1d07c0d86e351c384a6759eccddf
size 29352
wiki_asp.py
ADDED
@@ -0,0 +1,208 @@
1 |
+
# coding=utf-8
|
2 |
+
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
|
3 |
+
#
|
4 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
+
# you may not use this file except in compliance with the License.
|
6 |
+
# You may obtain a copy of the License at
|
7 |
+
#
|
8 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
+
#
|
10 |
+
# Unless required by applicable law or agreed to in writing, software
|
11 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
+
# See the License for the specific language governing permissions and
|
14 |
+
# limitations under the License.
|
15 |
+
"""Wiki Asp datasert for Multi-domain Aspect-based Summarization"""
|
16 |
+
|
17 |
+
from __future__ import absolute_import, division, print_function
|
18 |
+
|
19 |
+
import json
|
20 |
+
import os
|
21 |
+
|
22 |
+
import datasets
|
23 |
+
|
24 |
+
|
+# Citation for WikiAsp (https://arxiv.org/abs/2011.07832)
+_CITATION = """\
+@article{hayashi20tacl,
+    title = {WikiAsp: A Dataset for Multi-domain Aspect-based Summarization},
+    author = {Hiroaki Hayashi and Prashant Budania and Peng Wang and Chris Ackerson and Raj Neervannan and Graham Neubig},
+    journal = {Transactions of the Association for Computational Linguistics (TACL)},
+    year = {2020},
+    url = {https://arxiv.org/abs/2011.07832}
+}
+"""
+
+_DESCRIPTION = """\
+WikiAsp is a multi-domain, aspect-based summarization dataset in the encyclopedic
+domain. In this task, models are asked to summarize cited reference documents of a
+Wikipedia article into aspect-based summaries. Each of the 20 domains includes 10
+domain-specific pre-defined aspects.
+"""
+
+_HOMEPAGE = "https://github.com/neulab/wikiasp"
+
+_LICENSE = "CC BY-SA 4.0"
+
+# Download links
+_URLs = {
+    "album": "http://phontron.com/download/wikiasp/Album.tar.bz2",
+    "animal": "http://phontron.com/download/wikiasp/Animal.tar.bz2",
+    "artist": "http://phontron.com/download/wikiasp/Artist.tar.bz2",
+    "building": "http://phontron.com/download/wikiasp/Building.tar.bz2",
+    "company": "http://phontron.com/download/wikiasp/Company.tar.bz2",
+    "educational_institution": "http://phontron.com/download/wikiasp/EducationalInstitution.tar.bz2",
+    "event": "http://phontron.com/download/wikiasp/Event.tar.bz2",
+    "film": "http://phontron.com/download/wikiasp/Film.tar.bz2",
+    "group": "http://phontron.com/download/wikiasp/Group.tar.bz2",
+    "historic_place": "http://phontron.com/download/wikiasp/HistoricPlace.tar.bz2",
+    "infrastructure": "http://phontron.com/download/wikiasp/Infrastructure.tar.bz2",
+    "mean_of_transportation": "http://phontron.com/download/wikiasp/MeanOfTransportation.tar.bz2",
+    "office_holder": "http://phontron.com/download/wikiasp/OfficeHolder.tar.bz2",
+    "plant": "http://phontron.com/download/wikiasp/Plant.tar.bz2",
+    "single": "http://phontron.com/download/wikiasp/Single.tar.bz2",
+    "soccer_player": "http://phontron.com/download/wikiasp/SoccerPlayer.tar.bz2",
+    "software": "http://phontron.com/download/wikiasp/Software.tar.bz2",
+    "television_show": "http://phontron.com/download/wikiasp/TelevisionShow.tar.bz2",
+    "town": "http://phontron.com/download/wikiasp/Town.tar.bz2",
+    "written_work": "http://phontron.com/download/wikiasp/WrittenWork.tar.bz2",
+}
+
+
+class WikiAsp(datasets.GeneratorBasedBuilder):
+    """WikiAsp: multi-domain aspect-based summarization of Wikipedia reference documents."""
+
+    VERSION = datasets.Version("1.1.0")
+
+    # Each of the 20 domains below is exposed as its own configuration and can be
+    # loaded by name, e.g.:
+    # data = datasets.load_dataset('wiki_asp', 'album')
+    # data = datasets.load_dataset('wiki_asp', 'town')
+    BUILDER_CONFIGS = [
+        datasets.BuilderConfig(
+            name="album", version=VERSION, description="A subset of the dataset from the musical album domain"
+        ),
+        datasets.BuilderConfig(
+            name="animal", version=VERSION, description="A subset of the dataset from the animal domain"
+        ),
+        datasets.BuilderConfig(
+            name="artist", version=VERSION, description="A subset of the dataset from the artist domain"
+        ),
+        datasets.BuilderConfig(
+            name="building", version=VERSION, description="A subset of the dataset from the buildings domain"
+        ),
+        datasets.BuilderConfig(
+            name="company", version=VERSION, description="A subset of the dataset from the company domain"
+        ),
+        datasets.BuilderConfig(
+            name="educational_institution",
+            version=VERSION,
+            description="A subset of the dataset from the educational institution domain",
+        ),
+        datasets.BuilderConfig(
+            name="event", version=VERSION, description="A subset of the dataset from the events domain"
+        ),
+        datasets.BuilderConfig(name="film", version=VERSION, description="A subset of the dataset from the film domain"),
+        datasets.BuilderConfig(name="group", version=VERSION, description="A subset of the dataset from the group domain"),
+        datasets.BuilderConfig(
+            name="historic_place", version=VERSION, description="A subset of the dataset from the historic places domain"
+        ),
+        datasets.BuilderConfig(
+            name="infrastructure", version=VERSION, description="A subset of the dataset from the infrastructure domain"
+        ),
+        datasets.BuilderConfig(
+            name="mean_of_transportation",
+            version=VERSION,
+            description="A subset of the dataset from the means of transportation domain",
+        ),
+        datasets.BuilderConfig(
+            name="office_holder", version=VERSION, description="A subset of the dataset from the office holder domain"
+        ),
+        datasets.BuilderConfig(name="plant", version=VERSION, description="A subset of the dataset from the plant domain"),
+        datasets.BuilderConfig(
+            name="single", version=VERSION, description="A subset of the dataset from the musical single domain"
+        ),
+        datasets.BuilderConfig(
+            name="soccer_player", version=VERSION, description="A subset of the dataset from the soccer player domain"
+        ),
+        datasets.BuilderConfig(
+            name="software", version=VERSION, description="A subset of the dataset from the software domain"
+        ),
+        datasets.BuilderConfig(
+            name="television_show", version=VERSION, description="A subset of the dataset from the television show domain"
+        ),
+        datasets.BuilderConfig(name="town", version=VERSION, description="A subset of the dataset from the town domain"),
+        datasets.BuilderConfig(
+            name="written_work", version=VERSION, description="A subset of the dataset from the written work domain"
+        ),
+    ]
+
+    def _info(self):
+        features = datasets.Features(
+            {
+                "exid": datasets.Value("string"),
+                "inputs": datasets.Sequence(datasets.Value("string")),
+                "targets": datasets.Sequence(datasets.Sequence(datasets.Value("string"))),
+            }
+        )
+        return datasets.DatasetInfo(
+            # This is the description that will appear on the datasets page.
+            description=_DESCRIPTION,
+            # This defines the different columns of the dataset and their types.
+            features=features,
+            # No canonical (input, target) pair is defined for as_supervised=True,
+            # since each example carries multiple aspect-based targets.
+            supervised_keys=None,
+            # Homepage of the dataset for documentation.
+            homepage=_HOMEPAGE,
+            # License for the dataset.
+            license=_LICENSE,
+            # Citation for the dataset.
+            citation=_CITATION,
+        )
+
+    def _split_generators(self, dl_manager):
+        """Returns SplitGenerators."""
+        my_urls = _URLs[self.config.name]
+        data_dir = dl_manager.download_and_extract(my_urls)
+        # The archive extracts into a CamelCase directory, e.g. "soccer_player" -> "SoccerPlayer".
+        data_dir = os.path.join(data_dir, self.config.name.title().replace("_", ""))
+        return [
+            datasets.SplitGenerator(
+                name=datasets.Split.TRAIN,
+                # These kwargs will be passed to _generate_examples
+                gen_kwargs={
+                    "filepath": os.path.join(data_dir, "train.jsonl"),
+                    "split": "train",
+                },
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.TEST,
+                gen_kwargs={"filepath": os.path.join(data_dir, "test.jsonl"), "split": "test"},
+            ),
+            datasets.SplitGenerator(
+                name=datasets.Split.VALIDATION,
+                gen_kwargs={
+                    "filepath": os.path.join(data_dir, "valid.jsonl"),
+                    "split": "dev",
+                },
+            ),
+        ]
+
+    def _generate_examples(self, filepath, split):
+        """Yields examples from one JSON-lines file, keyed by line index."""
+        with open(filepath, encoding="utf-8") as f:
+            for id_, row in enumerate(f):
+                data = json.loads(row)
+                yield id_, {
+                    "exid": data["exid"],
+                    "inputs": data["inputs"],
+                    "targets": data["targets"],
+                }
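For context, _generate_examples consumes one JSON object per line of train.jsonl, valid.jsonl, or test.jsonl. A hypothetical row (all field values invented for illustration) matching the declared features:

{"exid": "town-train-0", "inputs": ["first cited reference sentence ...", "second cited reference sentence ..."], "targets": [["history", "the town was founded in ..."], ["geography", "the town lies on ..."]]}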
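And a minimal usage sketch for the finished script (any of the 20 configuration names defined above can replace "town"):

from datasets import load_dataset

# Downloads and extracts the Town.tar.bz2 archive, then builds the splits.
data = load_dataset("wiki_asp", "town")
example = data["train"][0]

print(example["exid"])           # unique example id
print(len(example["inputs"]))    # number of cited reference texts
# Each target is a list of strings; in WikiAsp it pairs an aspect
# name with its summary text.
for target in example["targets"]:
    print(target[0], "->", " ".join(target[1:])[:80])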