""" TweetTopic Dataset """ import json from itertools import chain import datasets logger = datasets.logging.get_logger(__name__) _DESCRIPTION = """[TweetTopic](TBA)""" _VERSION = "1.0.1" _CITATION = """ TBA """ _HOME_PAGE = "https://cardiffnlp.github.io" _LABEL_TYPE = "multi" _NAME = f"tweet_topic_{_LABEL_TYPE}" _URL = f'https://huggingface.co/datasets/cardiffnlp/{_NAME}/raw/main/dataset' _URLS = { str(datasets.Split.TEST): [f'{_URL}/split_temporal/test_2021.{_LABEL_TYPE}.json'], str(datasets.Split.TRAIN): [f'{_URL}/split_temporal/train_2021.{_LABEL_TYPE}.json'], str(datasets.Split.VALIDATION): [f'{_URL}/split_temporal/validation_2021.{_LABEL_TYPE}.json'], f"temporal_2020_{str(datasets.Split.TEST)}": [f'{_URL}/split_temporal/test_2020.{_LABEL_TYPE}.json'], f"temporal_2021_{str(datasets.Split.TEST)}": [f'{_URL}/split_temporal/test_2021.{_LABEL_TYPE}.json'], f"temporal_2020_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_temporal/train_2020.{_LABEL_TYPE}.json'], f"temporal_2021_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_temporal/train_2021.{_LABEL_TYPE}.json'], f"temporal_2020_{str(datasets.Split.VALIDATION)}": [f'{_URL}/split_temporal/validation_2020.{_LABEL_TYPE}.json'], f"temporal_2021_{str(datasets.Split.VALIDATION)}": [f'{_URL}/split_temporal/validation_2021.{_LABEL_TYPE}.json'], f"random_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_random/train_random.{_LABEL_TYPE}.json'], f"random_{str(datasets.Split.VALIDATION)}": [f'{_URL}/split_random/validation_random.{_LABEL_TYPE}.json'], f"coling2022_random_{str(datasets.Split.TEST)}": [f'{_URL}/split_coling2022_random/test_random.{_LABEL_TYPE}.json'], f"coling2022_random_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_coling2022_random/train_random.{_LABEL_TYPE}.json'], f"coling2022_temporal_{str(datasets.Split.TEST)}": [f'{_URL}/split_coling2022_temporal/test_2021.{_LABEL_TYPE}.json'], f"coling2022_temporal_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_coling2022_temporal/train_2020.{_LABEL_TYPE}.json'], } class TweetTopicSingleConfig(datasets.BuilderConfig): """BuilderConfig""" def __init__(self, **kwargs): """BuilderConfig. Args: **kwargs: keyword arguments forwarded to super. """ super(TweetTopicSingleConfig, self).__init__(**kwargs) class TweetTopicSingle(datasets.GeneratorBasedBuilder): """Dataset.""" BUILDER_CONFIGS = [ TweetTopicSingleConfig(name=_NAME, version=datasets.Version(_VERSION), description=_DESCRIPTION), ] def _split_generators(self, dl_manager): downloaded_file = dl_manager.download_and_extract(_URLS) return [datasets.SplitGenerator(name=i, gen_kwargs={"filepaths": downloaded_file[i]}) for i in _URLS.keys()] def _generate_examples(self, filepaths): _key = 0 for filepath in filepaths: logger.info(f"generating examples from = {filepath}") with open(filepath, encoding="utf-8") as f: _list = [i for i in f.read().split('\n') if len(i) > 0] for i in _list: data = json.loads(i) yield _key, data _key += 1 def _info(self): return datasets.DatasetInfo( description=_DESCRIPTION, features=datasets.Features( { "text": datasets.Value("string"), "date": datasets.Value("string"), "label": datasets.Value("int32"), "label_name": datasets.Value("string"), "id": datasets.Value("string") } ), supervised_keys=None, homepage=_HOME_PAGE, citation=_CITATION, )