#!/usr/bin/python
__author__ = "Sumin Byeon <suminb@gmail.com>"

import base64
import re
import urlparse
import urllib, urllib2
import sys, os
import string
import random
import time, datetime, calendar
import MySQLdb, _mysql_exceptions

import auth

# Global crawler configuration, tweakable from the command line below.
settings = {
    'max-depth': 4,   # maximum link-following depth for visit()
    'timeout': 10,    # seconds; NOTE(review): not referenced anywhere in this file — confirm it is still needed
}

def geturls(content, parenturl):
    """Extract URLs from a/frame/iframe/img tags in ``content``.

    Relative URLs are resolved against ``parenturl``.  Every URL found is
    passed to ``commit_urls`` (which drops already-known URLs from the
    list in place); the resulting list is returned.
    """
    pos = 0
    n = len(content)
    parenturl = urlparse.urlparse(parenturl)

    # BUGFIX: the hyphen must be the LAST character of the class.  The
    # original wrote '=-_', which regex parses as a range from '=' to '_',
    # so a literal '-' (very common in URLs) was NOT matched and URLs such
    # as 'my-site.com' were truncated at the hyphen.
    urlchars = '[0-9a-zA-Z.:;/%?&=_-]'
    # Compile once, outside the scan loop.
    tag_re = re.compile('(<a [^>]*href=\"' + urlchars + '+|<(frame|iframe|img) [^>]*src=\"' + urlchars + '+)', re.IGNORECASE)
    attr_re = re.compile('(src|href)=\"' + urlchars + '+\"', re.IGNORECASE)

    urls = []
    while pos <= n:
        localcontent = content[pos:n]

        # Find the next tag that can carry a link.
        m = tag_re.search(localcontent)
        if m is None:
            break
        pos = pos + m.start()
        localcontent = localcontent[m.start():n]

        # Isolate the src="..."/href="..." attribute itself.
        m = attr_re.search(localcontent)
        if m is None:
            break
        pos = pos + m.end()
        localcontent = localcontent[m.start():m.end()]

        # Strip the attribute name, the '="' prefix and the closing quote.
        offset = 0
        if localcontent[0:3].lower() == 'src':
            offset = 3
        elif localcontent[0:4].lower() == 'href':
            offset = 4

        url = localcontent[offset+2:len(localcontent)-1].strip()
        if re.match('(about|javascript|mailto):.*', url, re.IGNORECASE):
            continue

        # BUGFIX: check emptiness BEFORE indexing the last character; the
        # original indexed url[len(url)-1] first and raised IndexError on
        # an empty URL.
        if len(url) <= 0:
            continue
        if url[-1] == '/':
            url = url[:-1]
        if len(url) <= 0:
            continue

        if not re.match('(http|https).+', url):
            # Resolve relative URLs against the parent document.
            if url[0] == '/':
                url = '%s://%s%s' % (parenturl.scheme, parenturl.netloc, url)
            elif url[0:3] == '../':
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-2])), url)
            else:
                url = '%s://%s%s/%s' % (parenturl.scheme, parenturl.netloc, '/'.join((parenturl.path.split('/')[0:-1])), url)

        urls.append(url)

    commit_urls(urls)

    return urls

def commit_urls(urls):
    url_count = len(urls)
    duplicate_count = 0
    
    sys.stdout.write('  ')
    for url in urls:
        query = "INSERT INTO url (url) VALUES('%s')" % url
        try:
            dbc.execute(query)
            sys.stdout.write('+')
        except _mysql_exceptions.IntegrityError:
            urls.remove(url)
            duplicate_count += 1
            sys.stdout.write('-')
            continue
        except Exception, e:
            #sys.stderr.write(str(e))
            print e
            continue
        finally:
            sys.stdout.flush()
    sys.stdout.write('\n')
    
    db.commit()
    print '  URLs: %d found, %d in database, %d committed.' % (url_count, duplicate_count, url_count-duplicate_count)
            

def geturlrow(url):
    urlrow = None
    try:
        dbc.execute("SELECT * FROM url where url = %s LIMIT 1", (url))
        urlrow = dbc.fetchone()
    except Exception, e:
        print e
        
    return urlrow

def visit(url, depth):
    """Fetch ``url``, persist the response, and return discovered links.

    Returns the list of URLs found on the page when the response is
    text/html, [] for other content types, and None when ``depth``
    exceeds settings['max-depth'] or the URL is missing from the ``url``
    table.  Pages visited within the last 24 hours are served from the
    ``page`` table instead of being re-fetched... uses the module-level
    ``db``/``dbc`` connection objects.
    """
    if depth > settings['max-depth']:
        return
    
    print 'Crawling %s in depth %d...' % (url, depth)
    #opener = urllib2.build_opener()

    # NOTE(review): supplying data — even the empty urlencoded string —
    # makes urllib2 send a POST rather than a GET; confirm this is intended.
    data = {}
    data = urllib.urlencode(data)
    headers = { 'User-Agent' : 'Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.0.4) Gecko/2008111319 Ubuntu/8.10 (intrepid) Firefox/3.0.4' }
    request = urllib2.Request(url, data, headers)
    response = None
    response_code = 0
    
    try:
        #page = response.open(url)
        response = urllib2.urlopen(request)
        response_code = response.getcode()
    except urllib2.HTTPError, e:
        # An HTTP error still carries a status code worth recording.
        print e
        response_code = e.getcode()
    except urllib2.URLError, e:
        print e

    row = geturlrow(url)
    if row == None:
        sys.stderr.write('URL does not exist in the database')
        return
    
    # Convert the stored last_visit timestamp to epoch seconds (0 = never).
    last_visit = 0
    if row['last_visit'] != None:
        last_visit = calendar.timegm(row['last_visit'].timetuple())
    current = int(time.time())
    content = None
    
    if current - last_visit < 3600*24:
        # Visited within the last 24 hours: reuse the newest cached body.
        print '  This page has been visited recently, skipping...'
        
        dbc.execute('SELECT * FROM page WHERE url_id = %s ORDER BY `date` DESC LIMIT 1', row['id'])
        pagerow = dbc.fetchone()
        if pagerow != None:
            content = pagerow['content']
    else:
        # Record this visit (status code + timestamp) on the url row.
        try:
            result = dbc.execute('UPDATE url SET last_response = %s, last_visit = FROM_UNIXTIME(%s) WHERE id = %s LIMIT 1', (response_code, int(time.time()), row['id']))
        except Exception, e:
            print 'Exception:', e
            
        if response != None:
            try:
                content = response.read()
            except Exception, e:
                print '  Exception: Could not read the page:', e
                return
                
            # Non-text payloads are base64-encoded before being stored.
            if response.headers.type[0:4] != 'text':
                content = base64.b64encode(content)
            
            query = "INSERT INTO page (url_id, type, content) VALUES(%s, %s, %s)"
            args = (row['id'], response.headers.type, content)
            
            try:
                dbc.execute(query, args)
            except Exception, e:
                print '  Exception:', e
            
        try:
            db.commit()
        except Exception, e:
            print '  Exception:', e
    
    # Only HTML responses are parsed for further links.
    if response != None and response.headers.type == 'text/html':
        return geturls(content, url)
    else:
        return []
    
    '''
    urls = geturls(content, url)
    if len(urls) > 0:
        for u in urls:
            visit(u, depth+1)
    '''

# TODO: Needs to be replaced with getopts or something
if len(sys.argv) > 2:
    settings['max-depth'] = int(sys.argv[2])
    
db = None
try:
    db = MySQLdb.connect(host=auth.host, user=auth.user, passwd=auth.passwd, db=auth.db)
except Exception, e:
    print 'Could not connect to database:', e
    exit(1)
    
dbc = db.cursor(MySQLdb.cursors.DictCursor)

    
if len(sys.argv) > 1:
    url = sys.argv[1]
    commit_urls([url])
    urls = visit(url, 0)
    for url in urls:
        visit(url, 0)
else:
    while 1:
        urlrowset = None
        try:
            dbc.execute('SELECT * FROM url WHERE last_response is NULL ORDER BY RAND() LIMIT 25')
            urlrowset = dbc.fetchall()
        except Exception, e:
            print e
        
        for urlrow in urlrowset:    
            visit(urlrow['url'], 0)

db.close()

