Commit 85823cb
Parent(s): f362bc7
Delete legacy JSON metadata (#3)
- Delete legacy JSON metadata (acd4e26ce84c7d20ee4f1544cb6edbdc5e4074b0)
- dataset_infos.json +0 -1
dataset_infos.json
DELETED
@@ -1 +0,0 @@
-{"default": {"description": "\nHeadline-generation on a corpus of article pairs from Gigaword consisting of\naround 4 million articles. Use the 'org_data' provided by\nhttps://github.com/microsoft/unilm/ which is identical to\nhttps://github.com/harvardnlp/sent-summary but with better format.\n\nThere are two features:\n - document: article.\n - summary: headline.\n\n", "citation": "\n@article{graff2003english,\n title={English gigaword},\n author={Graff, David and Kong, Junbo and Chen, Ke and Maeda, Kazuaki},\n journal={Linguistic Data Consortium, Philadelphia},\n volume={4},\n number={1},\n pages={34},\n year={2003}\n}\n\n@article{Rush_2015,\n title={A Neural Attention Model for Abstractive Sentence Summarization},\n url={http://dx.doi.org/10.18653/v1/D15-1044},\n DOI={10.18653/v1/d15-1044},\n journal={Proceedings of the 2015 Conference on Empirical Methods in Natural Language Processing},\n publisher={Association for Computational Linguistics},\n author={Rush, Alexander M. and Chopra, Sumit and Weston, Jason},\n year={2015}\n}\n", "homepage": "https://github.com/harvardnlp/sent-summary", "license": "", "features": {"document": {"dtype": "string", "id": null, "_type": "Value"}, "summary": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": {"input": "document", "output": "summary"}, "task_templates": null, "builder_name": "gigaword", "config_name": "default", "version": {"version_str": "1.2.0", "description": null, "major": 1, "minor": 2, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 915249388, "num_examples": 3803957, "dataset_name": "gigaword"}, "validation": {"name": "validation", "num_bytes": 45767096, "num_examples": 189651, "dataset_name": "gigaword"}, "test": {"name": "test", "num_bytes": 450782, "num_examples": 1951, "dataset_name": "gigaword"}}, "download_checksums": {"https://drive.google.com/uc?export=download&id=1USoQ8lJgN8kAWnUnRrupMGrPMLlDVqlV": {"num_bytes": 578402958, "checksum": "bc0c4a2e1aa19cf2123688b87bc2d778c0d8fc24a4090e3c10a27c5faa1b898b"}}, "download_size": 578402958, "post_processing_size": null, "dataset_size": 961467266, "size_in_bytes": 1539870224}}
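
The deleted metadata describes the gigaword builder's default config: two string features (document and summary), supervised keys mapping document to summary, and train/validation/test splits of 3,803,957 / 189,651 / 1,951 examples. As a minimal illustrative sketch (not part of this commit), the same information can still be inspected at load time with the datasets library; the dataset name "gigaword" is taken from builder_name above, and exact behavior depends on the installed datasets version:

from datasets import load_dataset

# Load the default config; "gigaword" matches builder_name in the deleted JSON.
ds = load_dataset("gigaword")

# Features recorded in the deleted metadata: two string columns, document and summary.
print(ds["train"].features)

# Split sizes recorded in the deleted metadata: 3,803,957 / 189,651 / 1,951 examples.
for split in ("train", "validation", "test"):
    print(split, ds[split].num_rows)

# supervised_keys mapped document (article) -> summary (headline).
example = ds["train"][0]
print(example["document"])
print(example["summary"])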