albertvillanova HF staff committed on
Commit
498dbe6
1 Parent(s): 21a716f

Delete legacy JSON metadata (#4)

Browse files

- Delete legacy JSON metadata (10bf584101cd2711c4746ecd413bea8bc06c998d)

Files changed (1) hide show
  1. dataset_infos.json +0 -1
dataset_infos.json DELETED
@@ -1 +0,0 @@
1
- {"default": {"description": "The objective of this task is to detect hate speech in tweets. For the sake of simplicity, we say a tweet contains hate speech if it has a racist or sexist sentiment associated with it. So, the task is to classify racist or sexist tweets from other tweets.\n\nFormally, given a training sample of tweets and labels, where label \u20181\u2019 denotes the tweet is racist/sexist and label \u20180\u2019 denotes the tweet is not racist/sexist, your objective is to predict the labels on the given test dataset.\n", "citation": "@InProceedings{Z\nRoshan Sharma:dataset,\ntitle = {Sentimental Analysis of Tweets for Detecting Hate/Racist Speeches},\nauthors={Roshan Sharma},\nyear={2018}\n}\n", "homepage": "https://github.com/sharmaroshan/Twitter-Sentiment-Analysis", "license": "", "features": {"label": {"num_classes": 2, "names": ["no-hate-speech", "hate-speech"], "names_file": null, "id": null, "_type": "ClassLabel"}, "tweet": {"dtype": "string", "id": null, "_type": "Value"}}, "post_processed": null, "supervised_keys": null, "task_templates": [{"task": "text-classification", "text_column": "tweet", "label_column": "label", "labels": ["hate-speech", "no-hate-speech"]}], "builder_name": "tweets_hate_speech_detection", "config_name": "default", "version": {"version_str": "0.0.0", "description": null, "major": 0, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 3191776, "num_examples": 31962, "dataset_name": "tweets_hate_speech_detection"}}, "download_checksums": {"https://raw.githubusercontent.com/sharmaroshan/Twitter-Sentiment-Analysis/master/train_tweet.csv": {"num_bytes": 3103165, "checksum": "4f1bfabc2908029734fb2acd34028a8dfd1b92098bddfe60b0f0836c964e26ab"}}, "download_size": 3103165, "post_processing_size": null, "dataset_size": 3191776, "size_in_bytes": 6294941}}