from django.core.management.base import BaseCommand
from posts.models import Post, Comment, Category

import urllib
import httplib
import lxml
from lxml import html
import time, datetime
import json

# Host queried by both scrapers below (plain HTTP via httplib).
host = 'www.reddit.com'
# Custom User-agent header sent with every request.
# NOTE(review): presumably set because reddit throttles/blocks the default
# urllib/httplib agent strings — confirm against reddit's API rules.
headers = {"User-agent" : "NothingLeftToLose"}
# NOTE(review): `retries` is never read anywhere in this file — looks like
# retry logic was planned but not implemented.
retries = 3

def scrape_posts(pagelimit=-1):
     after = None
     pagenum = 1
     while pagenum != pagelimit:
         url = 'http://www.reddit.com/.json'
         if after:
             url += '?after=%s' % after
         print 'Scraping url: %s\n' % (url)
         
         conn = httplib.HTTPConnection(host)
         conn.request('GET', url, headers=headers)
         resp = conn.getresponse()
         data = resp.read()
         conn.close()

         after = _parsepage(data)
         pagenum+=1
         if not after:
             print "scrape completed"
             return r

def _parsepage(data):
    page = json.loads(data)
    if not page['kind'] == 'Listing':
        return None
    
    after = page['data']['after']
    
    for child in page['data']['children']:
        if child['kind'] == 't3':
            sub = child['data']
            writesubmission(sub)
             
    print '    writing %d submissions to database...' % (len(page['data']['children']))
    return after

def writesubmission(sub):
    print sub


class Command(BaseCommand):
    help = 'Scrapes Reddit frontpage for top posts'

    def handle(self, *arg, **options):
        self.stdout.write('\nScraping started at %s\n' % str(datetime.datetime.now()))

        after = None
        for i in range(0, 404):
            url = 'http://www.reddit.com/.json'
            if after:
                url += '?after=%s' % after
            self.stdout.write('Scraping url: %s\n' % url)
            r = requests.get(url)
            root = lxml.html.fromstring(r.content)
            self.stdout.write(root)

    def _parsepage(self, data):
        page = json.loads(data)
        if not page['kind'] == 'Listing':
            return (None, 0)

        after = page['data']['after']

        for child in page['data']['children']:
            if child['kind'] == 't3':
                sub = child['data']
                self.db.writesubmission(sub)
                minscore = min( minscore, int(sub['score']) )

        print '    writing %d submissions (minscore = %d) to database...' % (len(page['data']['children']), minscore)
        return (after, minscore)

# Allow running the scraper directly, outside Django's manage.py.
# pagelimit=10 stops before page 10, i.e. it fetches at most 9 pages.
if __name__ == '__main__':
    scrape_posts(10)
