albertvillanova (HF staff) committed
Commit
bd0f231
Parent: 593ced7

Delete legacy JSON metadata


Delete legacy `dataset_infos.json`.
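
For context: on the current Hub this split/feature metadata no longer needs a standalone `dataset_infos.json`; it lives in the `README.md` YAML header or is resolved by the `datasets` library at load time. A minimal sketch of how the same information stays accessible after this deletion, assuming a recent `datasets` release:

    from datasets import load_dataset_builder

    # Builder metadata is resolved from the repo's README.md YAML header,
    # the successor to the legacy dataset_infos.json deleted below.
    builder = load_dataset_builder("thaisum")

    print(builder.info.description)  # corpus description
    print(builder.info.features)     # title/body/summary/type/tags/url columns
    print(builder.info.splits)       # train/validation/test sizes
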

Files changed (1)
  1. dataset_infos.json +0 -1
dataset_infos.json DELETED
@@ -1 +0,0 @@
- {"thaisum": {"description": "ThaiSum is a large-scale corpus for Thai text summarization obtained from several online news websites namely Thairath,\nThaiPBS, Prachathai, and The Standard. This dataset consists of over 350,000 article and summary pairs\nwritten by journalists.\n", "citation": "@mastersthesis{chumpolsathien_2020, \n title={Using Knowledge Distillation from Keyword Extraction to Improve the Informativeness of Neural Cross-lingual Summarization},\n author={Chumpolsathien, Nakhun}, \n year={2020}, \n school={Beijing Institute of Technology}\n", "homepage": "https://github.com/nakhunchumpolsathien/ThaiSum", "license": "", "features": {"title": {"dtype": "string", "id": null, "_type": "Value"}, "body": {"dtype": "string", "id": null, "_type": "Value"}, "summary": {"dtype": "string", "id": null, "_type": "Value"}, "type": {"dtype": "string", "id": null, "_type": "Value"}, "tags": {"dtype": "string", "id": null, "_type": "Value"}, "url": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": {"input": "body", "output": "summary"}, "builder_name": "thaisum", "config_name": "thaisum", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 2945472406, "num_examples": 358868, "dataset_name": "thaisum"}, "validation": {"name": "validation", "num_bytes": 118437310, "num_examples": 11000, "dataset_name": "thaisum"}, "test": {"name": "test", "num_bytes": 119496704, "num_examples": 11000, "dataset_name": "thaisum"}}, "download_checksums": {"https://archive.org/download/thaisum_datasets/data.zip": {"num_bytes": 647582078, "checksum": "526610cc780ebe8c34c8bcd49d169861637fed426ba860fb1e9d48768599e1bf"}}, "download_size": 647582078, "post_processing_size": null, "dataset_size": 3183406420, "size_in_bytes": 3830988498}}