
"""
Uses the revisions table in wikinbitDb to get URLs to Web pages cited
by Wikipedia articles.  It fetches the text, stores it in the DB, and
also analyzes the text to find simple time references that it stores
in the refs_times table.
"""
# $Id: fetch_page.py 33 2011-08-04 02:09:27Z postshift@gmail.com $

import sys
import time
import pycurl
import StringIO
import datetime
import traceback
import wikinbitDb
from optparse import OptionParser

parser = OptionParser(usage="")
parser.add_option("--num_names", dest="desired_num_names", type=int,
                  help="number of cited URLs to fetch before exiting")
(options, args) = parser.parse_args()
# Without this check, a missing --num_names leaves desired_num_names as
# None, and in Python 2 `0 < None` is False, so the main loop below would
# silently run zero iterations.  Fail loudly instead.
if options.desired_num_names is None:
    parser.error("--num_names is required")

# must initialize the pycurl environment:
pycurl.global_init(pycurl.GLOBAL_DEFAULT)

# we will use this DB cursor many times below --- commiting each time
cur = wikinbitDb.conn.cursor()
num_names = 0
while num_names < options.desired_num_names:
    try:
        cur.execute("""
            select refUrl from revisions where 
              length(refUrl) > 0 and 
              not exists
                (select * from refs where refs.refUrl = revisions.refUrl) 
              limit 1""")
        url = cur.fetchall()[0]
        if len(url) == 0:
            break  # no records left
        else:
            url = url[0] # get the actual string
        assert url  # better not be None or otherwise False
    except Exception, exc:
        sys.exit(traceback.format_exc(exc))

    sys.stdout.write("Attempting %s\n\t" % url)
    sys.stdout.flush()

    c = pycurl.Curl()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.HTTPHEADER, ["Accept:"])
    b = StringIO.StringIO()
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    c.setopt(pycurl.FOLLOWLOCATION, 1)
    c.setopt(pycurl.MAXREDIRS, 5)
    content = ""
    try:
        c.perform()
        content = b.getvalue()
    except Exception, exc:
        print traceback.format_exc(exc)
        #sys.exit("failed on %s" % url)
        content = ""  # blank content is a lame way to indicate failure to fetch...

    cur.execute("insert into refs(refurl, content, lastFetched) values (%s, %s, %s)", (url, content, time.time()))

    wikinbitDb.conn.commit()

    print "%s Fetched %d bytes for %s" % (datetime.datetime.utcnow(), len(content), url)
    sys.stdout.flush()

    num_names += 1

    if num_names % 10 == 0:
        wikinbitDb.reset()
        cur = wikinbitDb.conn.cursor()

# Tear down in reverse order of setup: release libcurl's global state,
# then close the shared DB connection opened by the wikinbitDb module.
pycurl.global_cleanup()
wikinbitDb.conn.close()
print "Done!"

