Update tweet_topic_multi.py
tweet_topic_multi.py (+3 -3)
@@ -6,7 +6,7 @@ import datasets
 logger = datasets.logging.get_logger(__name__)
 _DESCRIPTION = """[TweetTopic](TBA)"""
 
-_VERSION = "1.0.
+_VERSION = "1.0.2"
 _CITATION = """
 TBA
 """
@@ -16,8 +16,8 @@ _NAME = f"tweet_topic_{_LABEL_TYPE}"
 _URL = f'https://huggingface.co/datasets/cardiffnlp/{_NAME}/raw/main/dataset'
 _URLS = {
     str(datasets.Split.TEST): [f'{_URL}/split_temporal/test_2021.{_LABEL_TYPE}.json'],
-    str(datasets.Split.TRAIN): [f'{_URL}/split_temporal/
-    str(datasets.Split.VALIDATION): [f'{_URL}/split_temporal/
+    str(datasets.Split.TRAIN): [f'{_URL}/split_temporal/train_2020.{_LABEL_TYPE}.json'],
+    str(datasets.Split.VALIDATION): [f'{_URL}/split_temporal/validation_2020.{_LABEL_TYPE}.json'],
     f"temporal_2020_{str(datasets.Split.TEST)}": [f'{_URL}/split_temporal/test_2020.{_LABEL_TYPE}.json'],
     f"temporal_2021_{str(datasets.Split.TEST)}": [f'{_URL}/split_temporal/test_2021.{_LABEL_TYPE}.json'],
     f"temporal_2020_{str(datasets.Split.TRAIN)}": [f'{_URL}/split_temporal/train_2020.{_LABEL_TYPE}.json'],
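For context, a minimal sketch of how the corrected split URLs are consumed through the datasets library. This is an illustration, not part of the commit: it assumes _LABEL_TYPE is "multi" here (so the dataset is published as cardiffnlp/tweet_topic_multi) and that the loading script registers the extra temporal_* keys in _URLS as named splits.

# Minimal usage sketch; assumptions are noted in the comments.
from datasets import load_dataset

# Default temporal splits, matching the URLs fixed in this commit:
# train_2020 / validation_2020 / test_2021.
dataset = load_dataset("cardiffnlp/tweet_topic_multi")
print(dataset["train"][0])
print(dataset["validation"][0])
print(dataset["test"][0])

# Assumption: the loader also exposes the extra keys in _URLS
# (e.g. "temporal_2020_test") as named splits.
test_2020 = load_dataset("cardiffnlp/tweet_topic_multi", split="temporal_2020_test")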