Tasks: Question Answering
Modalities: Text
Formats: parquet
Sub-tasks: extractive-qa
Languages: English
Size: 100K - 1M
Commit 5effdb8
Parent(s): e117573
Delete legacy JSON metadata (#2)
- Delete legacy JSON metadata (98f87cc1ae3028c8b706cd412147270db92618d0)
- dataset_infos.json +0 -1
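The deleted file carried the builder metadata (description, features, splits, checksums) shown in the diff below; with the legacy JSON gone, the same information can still be obtained through the `datasets` library. A minimal sketch, assuming the dataset resolves on the Hub under the id `mrqa`:

```python
# Minimal sketch: read dataset metadata without the legacy dataset_infos.json.
# The Hub id "mrqa" is an assumption; metadata now comes from the repo itself.
from datasets import load_dataset_builder

builder = load_dataset_builder("mrqa")
info = builder.info

print(info.description)      # MRQA 2019 Shared Task description
print(info.features)         # subset, context, question, detected_answers, ...
if info.splits:              # split sizes may be absent until the data is built
    for name, split in info.splits.items():
        print(name, split.num_examples)
```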
dataset_infos.json DELETED
@@ -1 +0,0 @@
{"plain_text": {"description": "The MRQA 2019 Shared Task focuses on generalization in question answering.\nAn effective question answering system should do more than merely\ninterpolate from the training set to answer test examples drawn\nfrom the same distribution: it should also be able to extrapolate\nto out-of-distribution examples \u2014 a significantly harder challenge.\n\nThe dataset is a collection of 18 existing QA dataset (carefully selected\nsubset of them) and converted to the same format (SQuAD format). Among\nthese 18 datasets, six datasets were made available for training,\nsix datasets were made available for development, and the final six\nfor testing. The dataset is released as part of the MRQA 2019 Shared Task.\n", "citation": "@inproceedings{fisch2019mrqa,\n title={{MRQA} 2019 Shared Task: Evaluating Generalization in Reading Comprehension},\n author={Adam Fisch and Alon Talmor and Robin Jia and Minjoon Seo and Eunsol Choi and Danqi Chen},\n booktitle={Proceedings of 2nd Machine Reading for Reading Comprehension (MRQA) Workshop at EMNLP},\n year={2019},\n}\n", "homepage": "https://mrqa.github.io/2019/shared.html", "license": "Unknwon", "features": {"subset": {"dtype": "string", "id": null, "_type": "Value"}, "context": {"dtype": "string", "id": null, "_type": "Value"}, "context_tokens": {"feature": {"tokens": {"dtype": "string", "id": null, "_type": "Value"}, "offsets": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}, "qid": {"dtype": "string", "id": null, "_type": "Value"}, "question": {"dtype": "string", "id": null, "_type": "Value"}, "question_tokens": {"feature": {"tokens": {"dtype": "string", "id": null, "_type": "Value"}, "offsets": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}, "detected_answers": {"feature": {"text": {"dtype": "string", "id": null, "_type": "Value"}, "char_spans": {"feature": {"start": {"dtype": "int32", "id": null, "_type": "Value"}, "end": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}, "token_spans": {"feature": {"start": {"dtype": "int32", "id": null, "_type": "Value"}, "end": {"dtype": "int32", "id": null, "_type": "Value"}}, "length": -1, "id": null, "_type": "Sequence"}}, "length": -1, "id": null, "_type": "Sequence"}, "answers": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}}, "post_processed": null, "supervised_keys": null, "builder_name": "mrqa", "config_name": "plain_text", "version": {"version_str": "1.1.0", "description": null, "major": 1, "minor": 1, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 4090681873, "num_examples": 516819, "dataset_name": "mrqa"}, "test": {"name": "test", "num_bytes": 57712177, "num_examples": 9633, "dataset_name": "mrqa"}, "validation": {"name": "validation", "num_bytes": 484107026, "num_examples": 58221, "dataset_name": "mrqa"}}, "download_checksums": {"https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/SQuAD.jsonl.gz": {"num_bytes": 27621835, "checksum": "b094703b9c6f740cc2dfd70b3201b833553fcec0c8a522f22c2c6ff82ce2cc78"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/NewsQA.jsonl.gz": {"num_bytes": 56451248, "checksum": "f1ccbf2d259ce1094aacde21a53592894248e5778814205dac94f0b086dbe968"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/TriviaQA-web.jsonl.gz": {"num_bytes": 356784923, "checksum": 
"61fad6884370408282ad3ed0b5f25a9e932d9a724b6929ea03ea5344ff0cd3f7"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/SearchQA.jsonl.gz": {"num_bytes": 641332495, "checksum": "32cda932667b7b65ab3079a8271d4e5726b4b989d0b862b25c77eb03a661b609"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/HotpotQA.jsonl.gz": {"num_bytes": 107394872, "checksum": "3a94712c073dc9f29d88ac149faa01ef9c7c089f97ee25d9cbac39387550825d"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/train/NaturalQuestionsShort.jsonl.gz": {"num_bytes": 116612493, "checksum": "6cdac324664b94b60be3203a077bf361d0bfa68a17af9b71def1186a6958a68c"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/SQuAD.jsonl.gz": {"num_bytes": 3474262, "checksum": "5afa4b088adf297fc29374ddf2d44d974b8837380e2554e62edf258fee5c32ee"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/NewsQA.jsonl.gz": {"num_bytes": 3142984, "checksum": "66bfb10cab2029bbc7d1afaece20c35fac341b1c179d15b70fde22a207f096ae"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/TriviaQA-web.jsonl.gz": {"num_bytes": 44971198, "checksum": "faf8add436de5a5fa81071a4e7190850d7e9a20acc811439e8a127ba8ec25640"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/SearchQA.jsonl.gz": {"num_bytes": 92526612, "checksum": "c84d2cc02cac5aa9d576ce1cd22900e9d75fe8a37bc795901c36cae6ef9e5ff0"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/HotpotQA.jsonl.gz": {"num_bytes": 10029807, "checksum": "43bb9291525d8b59229ba327b67cca42f0a9c23798c455f6fbe813e9979cca84"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/NaturalQuestionsShort.jsonl.gz": {"num_bytes": 10424248, "checksum": "2ba8b2181b520f81b49d62c0e4a23819f33d5dec0e8cf4a623edcda0feb73530"}, "http://participants-area.bioasq.org/MRQA2019/": {"num_bytes": 2666134, "checksum": "d8f237baea33bd0f4a664ef37ccd893cc682fd9458383dc1d1b8eb4685bb9efc"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/DROP.jsonl.gz": {"num_bytes": 592127, "checksum": "3f7b6b8131cd523d4451e98cf24adc53a92519763597261d28ae83f3920849ab"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/DuoRC.ParaphraseRC.jsonl.gz": {"num_bytes": 1197881, "checksum": "aeb8b9a31044be2ba3d62a456d61b2d447ff76dabe6fa77260b6efed0fb4c010"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/RACE.jsonl.gz": {"num_bytes": 1563018, "checksum": "c620ca043c78504ea02d1cef494207c6c76a5e5dedd7976f5fed5eb9724864b8"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/RelationExtraction.jsonl.gz": {"num_bytes": 850817, "checksum": "845668398356208246605fa1f363de63b45848c946d56514edcc8d00d12530ea"}, "https://s3.us-east-2.amazonaws.com/mrqa/release/v2/dev/TextbookQA.jsonl.gz": {"num_bytes": 1881401, "checksum": "1e861f197e739ead1947c60fa0917a02205dd48a559502194d7085ccd8608b64"}}, "download_size": 1479518355, "post_processing_size": null, "dataset_size": 4632501076, "size_in_bytes": 6112019431}}
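The download_checksums block lists a SHA-256 digest for every source archive, which makes it possible to verify a manually downloaded file. A short sketch using the digest recorded above for dev/DROP.jsonl.gz; the local file path is an assumption:

```python
# Sketch: verify a downloaded MRQA source archive against the sha256 listed above.
# The local path is an assumption; the expected digest is copied from the metadata.
import hashlib

EXPECTED = "3f7b6b8131cd523d4451e98cf24adc53a92519763597261d28ae83f3920849ab"  # dev/DROP.jsonl.gz

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("DROP.jsonl.gz") == EXPECTED, "checksum mismatch"
```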