huanggab commited on
Commit
b01d6d3
1 Parent(s): 8d60168

add processing code

Browse files
processing_code/accounts.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
import praw

# NOTE: module-level side effect — this prints on import, not only when run as a script.
print('Go to https://praw.readthedocs.io/en/stable/getting_started/authentication.html for authentication instructions')


def get_agent(client_id="<fill this up>",
              client_secret="<fill this up>",
              user_agent="<fill this up>"):
    """Build and return a PRAW Reddit client.

    Generalized from the original hard-coded placeholders: credentials can
    now be passed in as arguments, while the defaults keep the original
    "<fill this up>" placeholder behavior for backward compatibility.

    Parameters
    ----------
    client_id : str
        Reddit app client id (see the PRAW authentication guide printed above).
    client_secret : str
        Reddit app client secret.
    user_agent : str
        User-agent string identifying this script to the Reddit API.

    Returns
    -------
    praw.Reddit
        A Reddit client configured with the given credentials.
    """
    reddit = praw.Reddit(
        client_id=client_id,
        client_secret=client_secret,
        user_agent=user_agent,
    )
    return reddit
processing_code/process_convo.ipynb ADDED
The diff for this file is too large to render. See raw diff
 
processing_code/reddit-2022-10-20-dump.csv ADDED
The diff for this file is too large to render. See raw diff
 
processing_code/scrape_hot.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
"""Download posts from the r/haiku 'hot' listing via the Reddit API."""
import praw  # NOTE(review): not referenced directly here; used by accounts/reddit_scraper
from reddit_scraper import download_subreddit_posts
import accounts


# Authenticate (credentials come from accounts.get_agent) and select r/haiku.
subreddit = accounts.get_agent().subreddit("haiku")

# Fetch up to 1000 'hot' posts; max_fails=1000 presumably caps tolerated
# fetch failures — confirm semantics in reddit_scraper.
download_subreddit_posts(subreddit, mode='hot', limit=1000, max_fails=1000)
processing_code/scrape_new.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
"""Download posts from the r/haiku 'new' listing via the Reddit API."""
import praw  # NOTE(review): not referenced directly here; used by accounts/reddit_scraper
from reddit_scraper import download_subreddit_posts
import accounts


# Authenticate (credentials come from accounts.get_agent) and select r/haiku.
subreddit = accounts.get_agent().subreddit("haiku")

# Fetch up to 20000 'new' posts; max_fails=20000 presumably caps tolerated
# fetch failures — confirm semantics in reddit_scraper.
download_subreddit_posts(subreddit, mode='new', limit=20000, max_fails=20000)
processing_code/scrape_top.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
"""Download posts from the r/haiku 'top' listing via the Reddit API."""
import praw  # NOTE(review): not referenced directly here; used by accounts/reddit_scraper
from reddit_scraper import download_subreddit_posts
import accounts


# Authenticate (credentials come from accounts.get_agent) and select r/haiku.
subreddit = accounts.get_agent().subreddit("haiku")

# Fetch up to 20000 'top' posts; max_fails=20000 presumably caps tolerated
# fetch failures — confirm semantics in reddit_scraper.
download_subreddit_posts(subreddit, mode='top', limit=20000, max_fails=20000)
processing_code/scrape_top_month.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
"""Download the past month's top posts from r/haiku via the Reddit API."""
import praw  # NOTE(review): not referenced directly here; used by accounts/reddit_scraper
from reddit_scraper import download_subreddit_posts
import accounts


# Authenticate (credentials come from accounts.get_agent) and select r/haiku.
subreddit = accounts.get_agent().subreddit("haiku")

# Fetch up to 1000 'top' posts restricted to the last month; max_fails=1000
# presumably caps tolerated fetch failures — confirm in reddit_scraper.
download_subreddit_posts(subreddit, mode='top', limit=1000, max_fails=1000, time_filter='month')
processing_code/scrape_top_year.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
"""Download the past year's top posts from r/haiku via the Reddit API."""
import praw  # NOTE(review): not referenced directly here; used by accounts/reddit_scraper
from reddit_scraper import download_subreddit_posts
import accounts


# Authenticate (credentials come from accounts.get_agent) and select r/haiku.
subreddit = accounts.get_agent().subreddit("haiku")

# Fetch up to 1000 'top' posts restricted to the last year; max_fails=1000
# presumably caps tolerated fetch failures — confirm in reddit_scraper.
download_subreddit_posts(subreddit, mode='top', limit=1000, max_fails=1000, time_filter='year')