{"default": {"description": "An Urdu text corpus for machine learning, natural language processing and linguistic analysis.\n", "citation": "\\\n@misc{makhzan,\ntitle={Ma\u1e35\u1e96zan},\nhowpublished = \"\\url{}\",\n}\n", "homepage": "", "license": "All files in the /text directory are covered under standard copyright. Each piece of text has been included in this repository with explicity permission of respective copyright holders, who are identified in the <meta> tag for each file. You are free to use this text for analysis, research and development, but you are not allowed to redistribute or republish this text. Some cases where a less restrictive license could apply to files in the /text directory are presented below. In some cases copyright free text has been digitally reproduced through the hard work of our collaborators. In such cases we have credited the appropriate people where possible in a notes field in the file's metadata, and we strongly encourage you to contact them before redistributing this text in any form. \nWhere a separate license is provided along with the text, we have provided corresponding data in the publication field in a file's metadata.", "features": {"file_id": {"dtype": "string", "id": null, "_type": "Value"}, "metadata": {"dtype": "string", "id": null, "_type": "Value"}, "title": {"dtype": "string", "id": null, "_type": "Value"}, "num-words": {"dtype": "int64", "id": null, "_type": "Value"}, "contains-non-urdu-languages": {"dtype": "string", "id": null, "_type": "Value"}, "document_body": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": null, "task_templates": null, "builder_name": "makhzan", "config_name": "default", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 35637310, "num_examples": 5522, "dataset_name": "makhzan"}}, "download_checksums": {"": {"num_bytes": 15187763, "checksum": "d50d5d168d20dae7a2b4da3e51c60be1fb0a15588dfbeb4b9cf31e87fe45d54e"}}, "download_size": 15187763, "post_processing_size": null, "dataset_size": 35637310, "size_in_bytes": 50825073}}