Delete legacy JSON metadata

#3
by albertvillanova (HF staff) — opened
Files changed (1) hide show
  1. dataset_infos.json +0 -1
dataset_infos.json DELETED
@@ -1 +0,0 @@
1
- {"default": {"description": " Contains 10k tweets (training set) that are labeled as hate speech or non-hate speech. Released with 4,232 validation and 4,232 testing samples. Collected during the 2016 Philippine Presidential Elections.\n", "citation": "@article{Cabasag-2019-hate-speech,\n title={Hate speech in Philippine election-related tweets: Automatic detection and classification using natural language processing.},\n author={Neil Vicente Cabasag, Vicente Raphael Chan, Sean Christian Lim, Mark Edward Gonzales, and Charibeth Cheng},\n journal={Philippine Computing Journal},\n volume={XIV},\n number={1},\n month={August},\n year={2019}\n}\n", "homepage": "https://github.com/jcblaisecruz02/Filipino-Text-Benchmarks", "license": "", "features": {"text": {"dtype": "string", "id": null, "_type": "Value"}, "label": {"num_classes": 2, "names": ["0", "1"], "names_file": null, "id": null, "_type": "ClassLabel"}}, "post_processed": null, "supervised_keys": null, "task_templates": [{"task": "text-classification", "text_column": "text", "label_column": "label", "labels": ["0", "1"]}], "builder_name": "hate_speech_filipino", "config_name": "default", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 995919, "num_examples": 10000, "dataset_name": "hate_speech_filipino"}, "test": {"name": "test", "num_bytes": 995919, "num_examples": 10000, "dataset_name": "hate_speech_filipino"}, "validation": {"name": "validation", "num_bytes": 424365, "num_examples": 4232, "dataset_name": "hate_speech_filipino"}}, "download_checksums": {"https://s3.us-east-2.amazonaws.com/blaisecruz.com/datasets/hatenonhate/hatespeech_raw.zip": {"num_bytes": 822927, "checksum": "c530a4b724f9893eef3cc911f9bc3bfbd31cfd03315c0b56462dd1987d6c94e1"}}, "download_size": 822927, "post_processing_size": null, "dataset_size": 2416203, "size_in_bytes": 3239130}}