#!/usr/bin/python
# This is Brian Cole's original script to scrape the Maryland SDAT website and is only here for historical purposes.
# This script has been superseded by the Ruby script 'scrape.rb', which performs a full scrape, whereas this script only pulled a subset of the available data.
import timeoutsocket, string, MySQLdb, sys, _mysql, socket, os, time, urllib, re, md5

#batchsize=100
# Give every socket a 10-second timeout so a hung SDAT server cannot stall
# a worker forever (timeoutsocket patches the socket module globally).
timeoutsocket.setDefaultSocketTimeout(10)

# Placeholder strings SDAT uses for "no data"; parseRec blanks these out
# of every scraped field before trimming.
null_list = ['N/A','&nbsp;','&nbsp']

# Text fragments that identify an SDAT error page returned with HTTP 200;
# a match causes the record to be rescheduled as timed out (see scrape()).
error_list = ['The Following error',
              'Failed to connect',
              'There was an error returning your results',
              'The page cannot be displayed',
              'error \'80044000\'',
              'The remote procedure call failed',
              'Please try your search again later',
              'An exception occurred']

# Name of the MySQL work/result table.
table='sdat'
# One regex per scraped column; group(1) of each match is the value.
# The patterns anchor on glossary links (rp_def.html#...) in the page HTML.
fields = {
'class':r'rp_def.html#use.*?size="1">([^<]+)</font>',
'prinres':r'rp_def.html#prinres.*?size="1">([^<]+)</font>',
'bldg_area':r'size="1">([^<]+)<AM EncUnit\$>',
'land_area':r'size="1">([^<]+)<AM LandUnit\$>',
'bldg_type':r'html#exterior.*?size="1">.*?size="1">.*?size="1">([^<]+)</font>',
'land':r'rp_def.html#prefland.*?size="1">([^<]+)<br>[^<]+<br>[^<]+<br>[^<]+</font>',
'bldg':r'rp_def.html#prefland.*?size="1">[^<]+<br>([^<]+)<br>[^<]+<br>[^<]+</font>',
'total':r'rp_def.html#prefland.*?size="1">[^<]+<br>[^<]+<br>([^<]+)<br>[^<]+</font>',
'p_land':r'rp_def.html#prefland.*?size="1">[^<]+<br>[^<]+<br>[^<]+<br>([^<]+)</font>',
'tax_exempt':r'rp_def.html#taxexempt.*?size="1">([^<]+)</font>',
'tax_exempt_class':r'Exempt Class.*?size="1">([^<]+)</font>',
'tax_recapture':r'Exempt Class.*?size="1".*?size="1">([^<]+)</font>',
'delete_date':r'color="red"><b>([^<]+)</b>',
'addr':r'rp_def.html#premiseaddress.*?size="1".*?size="1".*?size="1">([^<]+?)</font>',
}
#Compile the Reg Exp for speedier searches
# (DOTALL so .*? can span the newlines in the page HTML)
for f in fields.keys():
    fields[f]=re.compile(fields[f],re.DOTALL)
#The other fields in the database
info_fields = ['client_id','ts','status','traceback','webpage','md5']
# Columns identifying the parcel (mirrors the select list in getRecords).
id_fields = ['lot','town_cd']


# Canned SQL statements; the table name is interpolated once, here.
q0='lock tables '+table+' write'
# q1 and q4 are completed with % formatting at call time in getRecords.
q1='select lot, status, town_cd from '+table+' where status is NULL OR status like "%s" limit %s'
q2='unlock tables'
q4='update '+table+' set client_id="%s", ts="%s", status="taken" where lot in (%s)'
# q5 becomes a REPLACE with one %(name)s placeholder per column, for use
# with dict-style executemany() parameters.
q5='replace '+table+' set '
for f in fields.keys()+info_fields+id_fields:
    q5 = q5 + f + '=%(' + f + ')s, '
q5=q5[:-2]

def timestamp():
    """Return the current local time as a 14-digit YYYYMMDDHHMMSS string."""
    return time.strftime('%Y%m%d%H%M%S', time.localtime())
# Script-level timing bookkeeping.  NOTE(review): these are the timestamp
# digits as a number (YYYYMMDDHHMMSS), not epoch seconds, so differences
# are only approximate elapsed time.
start=long(timestamp())
end=long(timestamp())

def chomp(s):
    """Strip a single trailing line terminator ('\\r\\n' or '\\n') from s."""
    if s.endswith('\r\n'):
        return s[:-2]
    if s.endswith('\n'):
        return s[:-1]
    return s

def trim(s):
    """Strip surrounding whitespace from s; return None if nothing remains.

    Replaces the deprecated string.strip() module function with the
    equivalent str method; behavior is otherwise unchanged.
    """
    s = s.strip()
    if s == '':
        return None
    return s
def printrec(d):
    """Log every field of record dict d via errmsg, in sorted key order.

    The bulky 'webpage' field (raw HTML) is skipped; a line of '=' marks
    the start and end of the record.
    """
    errmsg('=' * 60)
    for key in sorted(d.keys()):
        if key != 'webpage':
            errmsg('%s:%s' % (key, d[key]))
    errmsg('=' * 60)
    
def printrecs(l):
    """Log each record dict in the list l through printrec."""
    for record in l:
        printrec(record)
        
def errmsg(s):
    """Write a client-tagged, timestamped log line to stderr and flush.

    Relies on the module-global client_id being set (done in each forked
    worker before any logging happens).
    """
    sys.stderr.write('%s %s %s\n' % (client_id, timestamp(), s))
    sys.stderr.flush()
        
import traceback, cStringIO

def print_exception():
    """Return the current exception's traceback (up to 100 frames) as a string.

    Must be called from inside an except block.  traceback.format_exc()
    does directly what the original did by printing into a cStringIO buffer.
    """
    return traceback.format_exc(100)

def hexify(s):
    """Return the lowercase hex encoding of byte string s, two digits per byte.

    Bug fix: the original used hex(ord(c))[2:], which drops the leading
    zero for bytes < 0x10, producing an ambiguous, variable-length string
    (e.g. '\\x0f' became 'f' instead of '0f').
    """
    return ''.join(['%02x' % ord(c) for c in s])

def md5ify(s):
    """Return the 32-character hex MD5 digest of s.

    Uses hexdigest() directly: it is correctly zero-padded, unlike the
    original hexify(md.digest()) route, where hexify dropped leading
    zeros for digest bytes < 0x10.
    """
    return md5.new(s).hexdigest()


def getRecords(batchsize):
    """Claim up to batchsize unprocessed parcels from the work table.

    Locks the table, selects rows whose status is NULL (or, when none
    remain, rows previously marked timedout_*), marks the claimed rows as
    "taken" by this client, and returns the (lot, status, town_cd) tuples.

    Bug fixes vs. the original:
    - `return parcels` lived inside the finally block, which swallowed any
      exception and raised NameError when the select itself failed before
      `parcels` was bound.  The return now happens after the finally.
    - An empty batch produced invalid SQL (`... lot in ()`); the update is
      now skipped when nothing was fetched.
    """
    parcels = ()
    try:
        cur.execute(q0)  # lock the table so no other client claims the same rows
        num = cur.execute(q1 % ('', batchsize))
        # Fall back to retrying timed-out records once fresh ones run out.
        if num == 0:
            errmsg('this process is doing timed out records')
            cur.execute(q1 % ('timedout_%', batchsize))
        parcels = cur.fetchall()
        if parcels:
            ids = ', '.join(['"' + p[0] + '"' for p in parcels])
            cur.execute(q4 % (client_id, timestamp(), ids))  # mark as taken
    finally:
        cur.execute(q2)  # always unlock the table, even on error
    return parcels
#    print str(long(timestamp())-end)+' Seconds To Get Records'

sdataddr='http://sdatcert3.resiusa.org/rp_rewrite/detail.asp?accountnumber=%s&county=%s&intMenu=2&SearchType=Account'
def getWebPage(lot):
    """Fetch and return the SDAT detail HTML page for the given parcel id.

    The first two characters of lot are the county code; the rest is
    reformatted into the '+'-separated account number that county's pages
    expect in the query string.
    """
    county = lot[:2]
    if county == '02':
        acct = lot[2:4] + '+' + lot[4:7] + '+' + lot[7:]
    elif county == '03':
        # County 03 account numbers may contain spaces; SDAT wants '+'.
        cleaned = lot[2:].replace(' ', '+')
        acct = cleaned[:2] + '+' + cleaned[2:4] + '+' + cleaned[4:9] + '+' + cleaned[9:]
    else:
        acct = lot[2:4] + '+' + lot[4:]
    return urllib.urlopen(sdataddr % (acct, county)).read()

def parseRec(s):
    """Extract all scraped columns from a detail-page HTML string.

    Returns a dict with one entry per regex in the module-level `fields`
    dict: group(1) of the match, with SDAT "no data" placeholders
    (null_list) removed and whitespace trimmed (None if nothing remains or
    the pattern did not match).  'status' is set to 'done'.

    Replaces the deprecated string.replace() module function with the
    equivalent str method.
    """
    d = {}
    for f in fields.keys():
        m = fields[f].search(s)
        if m:
            t = m.group(1)
            # Blank out known "no data" placeholders before trimming.
            for n in null_list:
                t = t.replace(n, '')
            d[f] = trim(t)
        else:
            d[f] = None
    d['status'] = 'done'
    return d

def process_timeout(status):
    """Advance a 'timedout_N' status string, giving up after 10 attempts.

    status may be None (fresh record) or any previous status string.
    Anything without a parseable numeric suffix starts the count at
    'timedout_1'; 10 or more prior timeouts yields 'dead_timedout'.

    The original used a bare except:; this catches only the two cases the
    fallback is actually for (TypeError when status is None, ValueError
    when the suffix is not an integer).
    """
    try:
        attempts = int(status[9:])
    except (TypeError, ValueError):
        # status is None or has no numeric suffix: first recorded timeout.
        return 'timedout_1'
    # Allow up to 10 timeouts before declaring the record dead.
    if attempts < 10:
        return status[:9] + str(attempts + 1)
    return 'dead_timedout'

def scrape(batchsize,rec_count=0):
    """Main worker loop: claim batches of parcels, scrape each one, and
    write the parsed results back to the SQL table.

    batchsize: number of records to claim per iteration.
    rec_count: number of batches to process; a negative value means
               "run until the work table is exhausted".
    Returns the elapsed time as a string (timestamp-digit difference, so
    only approximate seconds).

    Changes vs. the original: the per-record identification columns are
    assigned through an explicit mapping instead of eval(), and the
    deprecated string.find() module function is replaced by the str method.
    """
    end=long(timestamp())
    while rec_count!=0:
        rec_count=rec_count-1
        errmsg('retrieving a batch of records')
        # Get a batch of records
        parcels=getRecords(batchsize)
        if not parcels:  # no work left
            break
        recs=[]
        for lot, status, town_cd in parcels:
            s=''
            d={}
            try:
                s=getWebPage(lot)
                d=parseRec(s)
                # SDAT sometimes returns an error page with HTTP 200; treat
                # any known error text as a timeout so the record is retried.
                for e in error_list:
                    if s.find(e) != -1:
                        d['status']=process_timeout(status)
            except:
                d['status']='error'
                d['traceback']=print_exception()
                if sys.exc_info()[0]==KeyboardInterrupt:
                    raise
                elif sys.exc_info()[0]==timeoutsocket.Timeout:
                    d['status']=process_timeout(status)
            # Attach the identification columns (previously done via eval()).
            id_values={'lot': lot, 'town_cd': town_cd}
            for f in id_fields:
                d[f]=id_values[f]
            d['client_id']=client_id
            d['ts']=timestamp()
            # Every column named in the REPLACE statement needs a value.
            for f in fields.keys()+info_fields+id_fields:
                if f not in d.keys():
                    d[f]=None
            # Keep the raw HTML only when no address was parsed, so a human
            # can inspect what actually came back.
            if not d['addr']:
                d['webpage']=s
            recs.append(d)
        # Save results in sql db
        errmsg('saving completed records in SQL db')
        cur.executemany(q5, recs)
        errmsg('done saving completed records in SQL')
    end=str(long(timestamp())-end)
    errmsg('this process is done')
    return end

def makeoptd(opts):
    """Convert getopt (option, value) pairs into a dict keyed by the bare
    option name, with up to two leading dashes stripped."""
    optd = {}
    for name, value in opts:
        if name.startswith('--'):
            name = name[2:]
        elif name[0] == '-':
            name = name[1:]
        optd[name] = value
    return optd

#scrape()

import getopt
# Command-line driver.  (The `if 1:` wrapper exists only so the body could
# be indented as a unit; it always runs.)
if 1:
    args=sys.argv[1:]
    opts, pargs=getopt.getopt(args, '', ('numtasks=', 'help', 'fresh','retry','flush','repeat=','batchsize='))
    optd=makeoptd(opts)
    if optd.has_key('help'):
        print 'Usage:  scrapebrt --help --fresh --numtasks=1 --repeat=-1 --retry --flush --batchsize=100'
        sys.exit(0)

    # --fresh: (re)build the work table from the master `maryland` table.
    # NOTE(review): database credentials are hard-coded here and below;
    # they should come from a config file or the environment.
    if optd.has_key('fresh'):
            con=MySQLdb.connect(host='localhost', user='brian', passwd='39sharon', db='maryland')
            cur=con.cursor()
            #CREATE SUB-TABLE
            create = 'CREATE TABLE `'+table+'` SELECT town_cd, lot FROM `maryland`;'
            try:
                cur.execute(create)
            except:
                # Assume the table already exists: drop and recreate it.
                cur.execute('DROP TABLE '+table)
                cur.execute(create)
            cur.execute('ALTER TABLE '+table+' MODIFY lot char(15) NOT NULL;')
            cur.execute('ALTER TABLE '+table+' ADD PRIMARY KEY (lot);')

            #CREATE NEEDED COLUMNS
            update = 'UPDATE '+table+' SET %s=NULL;'
            alter = 'ALTER TABLE '+table+' ADD COLUMN %s varchar(255);'
            ks=fields.keys()
            ks.sort()
            for f in ks+info_fields:
                try:
                    cur.execute(alter % f)
                except:
                    # Column presumably exists already: just clear it.
                    cur.execute(update % f)
            cur.execute('ALTER TABLE '+table+' MODIFY traceback longtext;')
            cur.execute('ALTER TABLE '+table+' MODIFY webpage longtext;')

    # --retry: put previously errored or timed-out records back in the queue.
    if optd.has_key('retry'):
        con=MySQLdb.connect(host='localhost', user='brian',  passwd='39sharon', db='maryland')
        cur=con.cursor()
        cur.execute('UPDATE '+table+' SET status=NULL where status like "error" or status like "%timedout%";')

    # --flush: kill every other running copy of this script, then release
    # the records those copies had claimed ("taken") back to the queue.
    if optd.has_key('flush'):
        for pid in os.popen('ps -A | grep scrape_sdat.py').read().split('\n'):
            pid=trim(pid)
            if pid:
                pid=pid.split(' ')[0]
                if not os.getpid() == int(pid):  # don't kill ourselves
                    print 'Killing:'+pid
                    os.kill(int(pid),9)
        con=MySQLdb.connect(host='localhost', user='brian',  passwd='39sharon', db='maryland')
        cur=con.cursor()
        print 'Cleaning database'
        cur.execute('UPDATE '+table+' SET status=NULL WHERE status="taken";')

#client_id = 'localhost'
#scrape(-1)

    numtasks=int(optd.get('numtasks', '1'))
    repeat=int(optd.get('repeat', '-1'))
    batchsize=int(optd.get('batchsize', '100'))
    # Start up the requested number of sub tasks
    err=open('scrape_sdat.err','a')

    for x in xrange(0,numtasks):
        time.sleep(10)  # stagger worker start-up
        r=os.fork()
        if not r:
            # We are the children
            con=MySQLdb.connect(host='mail', user='brian', passwd='39sharon', db='maryland')
            cur=con.cursor()
            # client_id is the module-global tag used by errmsg/getRecords.
            client_id='%s:%i_%i' % (socket.gethostname(), numtasks, os.getpid())
            errmsg('starting client %s' % client_id)
            try:
                end=scrape(batchsize,repeat)
            except:
                # Record a child's fatal error on stderr and in the log file.
                e='='*60
                e=e+'\nThis client died prematurely:'+client_id+'\n'
                e=e+'='*60
                e=e+'\n'+print_exception()+'\n'
                errmsg(e)
                err.write(e)
            sys.exit(0) # Don't have any kids ourselves!
    os.wait()
    sys.exit(0)
