asahi417 committed on
Commit
51f8aff
1 Parent(s): 0f543b6
Files changed (1)
  1. super_tweet_eval.py +3 -3
super_tweet_eval.py CHANGED
@@ -2,7 +2,7 @@
 import json
 import datasets
 
-_VERSION = "0.1.0"
+_VERSION = "0.1.1"
 _SUPER_TWEET_EVAL_CITATION = """TBA"""
 _SUPER_TWEET_EVAL_DESCRIPTION = """TBA"""
 _TWEET_TOPIC_DESCRIPTION = """
@@ -172,8 +172,8 @@ class SuperTweetEval(datasets.GeneratorBasedBuilder):
             description=_TEMPO_WIC_DESCRIPTION,
             citation=_TEMPO_WIC_CITATION,
             features=['gold_label_binary', 'word',
-                      'text_1', 'text_1_tokenized', 'token_idx_1', 'text_start_1', 'text_end_1', 'date_1',
-                      'text_2', 'text_2_tokenized', 'token_idx_2', 'text_start_2', 'text_end_2', 'date_2'],
+                      'text_1', 'text_1_tokenized', 'token_idx_1', 'date_1',
+                      'text_2', 'text_2_tokenized', 'token_idx_2', 'date_2'],
             data_url="https://huggingface.co/datasets/cardiffnlp/super_tweet_eval/resolve/main/data/tempo_wic",
         )
     ]
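
In effect, the commit bumps the loader version from 0.1.0 to 0.1.1 and drops the character-offset fields (text_start_1/text_end_1 and text_start_2/text_end_2) from the TempoWiC feature list, leaving only the token-level indices. A minimal usage sketch with the datasets library follows; the config name "tempo_wic" and the "train" split are assumptions inferred from the data_url above, not stated in the diff:

from datasets import load_dataset

# Assumption: the TempoWiC subset is exposed under the "tempo_wic" config name,
# matching the data_url in the diff above.
dataset = load_dataset("cardiffnlp/super_tweet_eval", "tempo_wic")

# After this change, examples should carry token-level indices
# ('token_idx_1', 'token_idx_2') but no 'text_start_*'/'text_end_*' offsets.
print(dataset["train"].features)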