Datasets:

Languages:
Turkish
Multilinguality:
monolingual
Size Categories:
10K<n<100K
Language Creators:
found
Annotations Creators:
found
Source Datasets:
original
License:
albertvillanova HF staff committed on
Commit
f5929ce
1 Parent(s): 43d001a

Delete legacy JSON metadata (#3)

Browse files

- Delete legacy JSON metadata (84a39ac258fb758fc3b0a7a5289f55598852056b)

Files changed (1) hide show
  1. dataset_infos.json +0 -1
dataset_infos.json DELETED
@@ -1 +0,0 @@
1
- {"offenseval2020-turkish": {"description": "OffensEval-TR 2020 is a Turkish offensive language corpus. The corpus consist of randomly sampled tweets, and annotated in a similar way to OffensEval and GermEval.\n", "citation": "@InProceedings{coltekin2020lrec,\n author = {Cagri Coltekin},\n year = {2020},\n title = {A Corpus of Turkish Offensive Language on Social Media},\n booktitle = {Proceedings of The 12th Language Resources and Evaluation Conference},\n pages = {6174--6184},\n address = {Marseille, France},\n url = {https://www.aclweb.org/anthology/2020.lrec-1.758},\n}\n", "homepage": "https://coltekin.github.io/offensive-turkish/", "license": "", "features": {"id": {"dtype": "int32", "id": null, "_type": "Value"}, "tweet": {"dtype": "string", "id": null, "_type": "Value"}, "subtask_a": {"num_classes": 2, "names": ["NOT", "OFF"], "names_file": null, "id": null, "_type": "ClassLabel"}}, "post_processed": null, "supervised_keys": null, "builder_name": "offenseval2020_tr", "config_name": "offenseval2020-turkish", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 4260505, "num_examples": 31756, "dataset_name": "offenseval2020_tr"}, "test": {"name": "test", "num_bytes": 481300, "num_examples": 3528, "dataset_name": "offenseval2020_tr"}}, "download_checksums": {"https://coltekin.github.io/offensive-turkish/offenseval2020-turkish.zip": {"num_bytes": 2048258, "checksum": "7977e96255dbc9b8d14893f1b14cbe3dec53c70358503c062c5a59720ec9c2f2"}}, "download_size": 2048258, "post_processing_size": null, "dataset_size": 4741805, "size_in_bytes": 6790063}}