Datasets:
Tasks: Text Classification
Modalities: Text
Sub-tasks: sentiment-classification
Languages: English
Size: 10K<n<100K
ArXiv: 2105.09967
License:
initial import
Browse files
- .gitignore +1 -0
- README.md +33 -0
- ReactionGIF.ids.json +0 -0
- fetch-tweets.py +50 -0
.gitignore
ADDED
@@ -0,0 +1 @@
+.env/
README.md
ADDED
@@ -0,0 +1,33 @@
+## ReactionGIF
+
+> From https://github.com/bshmueli/ReactionGIF
+
+
+
+___
+
+## Excerpt from original repo readme
+
+ReactionGIF is a unique, first-of-its-kind dataset of 30K sarcastic tweets and their GIF reactions.
+
+To find out more about ReactionGIF,
+check out our ACL 2021 paper:
+
+* Shmueli, Ray and Ku, [Happy Dance, Slow Clap: Using Reaction GIFs to Predict Induced Affect on Twitter](https://arxiv.org/abs/2105.09967)
+
+
+## Citation
+
+If you use our dataset, kindly cite the paper using the following BibTeX entry:
+
+```bibtex
+@misc{shmueli2021happy,
+      title={Happy Dance, Slow Clap: Using Reaction {GIFs} to Predict Induced Affect on {Twitter}},
+      author={Boaz Shmueli and Soumya Ray and Lun-Wei Ku},
+      year={2021},
+      eprint={2105.09967},
+      archivePrefix={arXiv},
+      primaryClass={cs.CL}
+}
+```
+
ReactionGIF.ids.json
ADDED
The diff for this file is too large to render.
See raw diff
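Although the diff is too large to render, the file's shape can be inferred from how fetch-tweets.py below consumes it: JSON Lines, one object per line, each carrying an `original_id`. A minimal inspection sketch (only `original_id` is known from the script; any other field names would be assumptions):

```python
# Minimal sketch for peeking at ReactionGIF.ids.json.
# Assumption: JSON Lines, one object per line, each with an
# 'original_id' key -- this is what fetch-tweets.py relies on.
import json

with open('ReactionGIF.ids.json', 'r') as f:
    rows = [json.loads(line) for line in f]

print(f'{len(rows)} rows loaded')
print('Keys in the first row:', sorted(rows[0].keys()))
print('First original_id:', rows[0]['original_id'])
```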
fetch-tweets.py
ADDED
@@ -0,0 +1,50 @@
+from credentials import CONSUMER_KEY, CONSUMER_SECRET
+import tweepy, json
+from time import sleep
+
+def fetch_tweets(rows):
+    tweet_ids = [row['original_id'] for row in rows]
+    return {tweet.id_str: tweet for tweet in fetch_ids(tweet_ids) if tweet.id_str != ''}
+
+def fetch_ids(ids):
+    id_lists = [ids[x:x+100] for x in range(0, len(ids), 100)]
+    tweets = []
+    for idx, id_list in enumerate(id_lists):
+        print('{}.'.format(len(id_lists) - idx), flush=True, end='')
+        tweets.extend([tweet for tweet in api.statuses_lookup(id_list, tweet_mode='extended')])
+    print()
+    return tweets
+
+def deanon(de_anon_file, rows, tweets):
+    found = 0
+    with open(de_anon_file, 'w') as f:
+        for row in rows:
+            if row['original_id'] in tweets:
+                found += 1
+                row['text'] = tweets[row['original_id']].full_text
+                f.write(json.dumps(row, ensure_ascii=False) + "\n")
+    print(f'Found {found} tweets out of {len(rows)}.')
+
+def convert(anon_file, de_anon_file):
+    print('Fetching texts for {}'.format(anon_file))
+    rows = [json.loads(row) for row in open(anon_file, 'r').readlines()]
+    tweets = fetch_tweets(rows)
+    deanon(de_anon_file, rows, tweets)
+
+if __name__ == "__main__":
+    try:
+        CONSUMER_KEY
+        CONSUMER_SECRET
+    except NameError:
+        print('Edit credentials.py to add your Twitter API credentials in the first two lines (CONSUMER_KEY and CONSUMER_SECRET)')
+        print('See here for more information on getting API credentials: https://developer.twitter.com/en/apps')
+        exit(1)
+    auth = tweepy.AppAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
+    api = tweepy.API(auth, wait_on_rate_limit=True,
+                     wait_on_rate_limit_notify=True,
+                     retry_count=10, retry_delay=60,
+                     retry_errors=[400] + list(range(402, 599)))
+    print('This can take some time, so make yourself a cup of Taiwanese oolong tea and let the magic happen!')
+    sleep(3)
+    convert('ReactionGIF.ids.json', 'ReactionGIF.json')
+    print("That's it! Hope you enjoyed the ride :)")
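The script expects a credentials.py alongside it, as its own error message explains. A minimal sketch of that file (both values are placeholders; the two names are exactly what fetch-tweets.py imports):

```python
# credentials.py -- minimal sketch. Replace the placeholder strings with
# the consumer key and secret from your Twitter developer app
# (https://developer.twitter.com/en/apps); fetch-tweets.py imports
# exactly these two names.
CONSUMER_KEY = 'YOUR-CONSUMER-KEY'        # placeholder
CONSUMER_SECRET = 'YOUR-CONSUMER-SECRET'  # placeholder
```

With credentials in place, running `python fetch-tweets.py` reads ReactionGIF.ids.json and writes ReactionGIF.json, one JSON object per line with the fetched tweet text filled in. Note that the script targets the tweepy 3.x API: tweepy 4.x renamed `statuses_lookup` to `lookup_statuses` and removed `wait_on_rate_limit_notify`, so pin `tweepy<4` if you hit attribute errors.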