# -*- coding: utf-8 -*-

import socket
import threading
import time
import sys
import os
import httplib, urllib, urllib2
import re
import sqlite3
from urlparse import urljoin

# IDs of URLs that currently have a live ThreadServe worker: ThreadMain appends
# an id when it spawns a worker; the worker removes its own id when the URL is
# disabled in the DB. Shared across threads without a lock -- TODO confirm the
# list ops here are safe under the GIL for this access pattern.
loadedUrls = []

def DBReadURLs():
    """Read every enabled page URL from the database.

    Returns a list of dicts, one per enabled row of PC_PAGEURLS, with keys
    'url', 'urlid' and 'interval' (scan period in seconds).
    """
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    cur.execute("select url,id,interval from PC_PAGEURLS WHERE enabled=1;")
    rows = cur.fetchall()
    cur.close()
    conn.close()
    # row layout follows the SELECT column order: (url, id, interval)
    return [{'url': row[0], 'urlid': row[1], 'interval': row[2]} for row in rows]

def DBGetRefresh(urlid):
    """Return 1 if a rescan of *urlid* was requested, else 0.

    A rescan is due when the enabled row has refreshonce=1 (a one-shot flag
    that is consumed here by clearing it) or refreshevery=1 (permanent flag).
    """
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    # parameterized query instead of string concatenation (SQL-injection safe)
    cur.execute("select refreshonce, refreshevery from PC_PAGEURLS WHERE enabled=1 and id=?;", (urlid,))
    res = cur.fetchall()
    ret = 0
    for line in res:
        if line[0] == 1:
            ret = 1
            DBClearRefreshOnce(urlid)  # consume the one-shot flag
        elif line[1] == 1:
            ret = 1
    cur.close()
    conn.close()
    return ret

def DBClearRefreshOnce(urlid):
    """Reset the one-shot refreshonce flag of *urlid* back to 0."""
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    # parameterized query instead of string concatenation (SQL-injection safe)
    cur.execute("update PC_PAGEURLS set refreshonce=0 where id=?;", (urlid,))
    conn.commit()
    cur.close()
    conn.close()
    return
    

def DBWriteLogs(urlid, okcount, errcount):
    """Insert one scan-summary row (good/bad link counts) for *urlid* into PC_PAGELOGS."""
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    # parameterized query instead of string concatenation (SQL-injection safe)
    cur.execute("insert into PC_PAGELOGS (urlid, okcount, errcount) values (?, ?, ?);",
                (urlid, okcount, errcount))
    conn.commit()
    cur.close()
    conn.close()
    return

def DBInitScan(urlid):
    """Check whether *urlid* is still enabled and, if so, mark it running.

    Returns the row's enabled flag (truthy -> scan may proceed); returns 0
    without touching the row when the id is disabled or missing.
    """
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    # parameterized queries instead of string concatenation (SQL-injection safe)
    cur.execute("select enabled from PC_PAGEURLS WHERE id=?;", (urlid,))
    res = cur.fetchall()
    ret = 0
    for line in res:
        ret = line[0]
    if ret:
        cur.execute("update PC_PAGEURLS set status='running' where id=?;", (urlid,))
        conn.commit()
    cur.close()
    conn.close()
    return ret

def DBWriteErrorLogs(urlid, lnk, errtype):
    """Record one broken link for a page: the failing URL and the error text.

    Security fix: *lnk* and *errtype* come from scraped pages / exception
    messages (untrusted input) and were previously concatenated straight into
    the SQL text -- any apostrophe broke the statement and allowed injection.
    Bound parameters handle quoting correctly.
    """
    conn = sqlite3.connect("./htmlpagechecker.sqlite")
    cur = conn.cursor()
    cur.execute("insert into PC_PAGEERRLOGS (urlreferid, lnk, errtype) values (?, ?, ?);",
                (urlid, lnk, errtype))
    conn.commit()
    cur.close()
    conn.close()
    return
    
def GETHtmlLines(url):
    """Fetch *url* and return its body as a list of lines, or None on failure.

    Returns None when *url* is None or when the fetch/read fails; errors are
    logged and swallowed deliberately (best-effort scan), but no longer hide
    SystemExit/KeyboardInterrupt the way the old bare "except:" did.
    """
    if url is None:
        return None
    try:
        page = urllib.urlopen(url)
        try:
            html = page.readlines()
        finally:
            page.close()  # close even if readlines() raises (was leaked before)
        return html
    except Exception:
        print("GETHtmlLines() error!")
        return None

def GETUrl(url,urlid):
    lines= GETHtmlLines(url)
    regx = r"""((src)|(href))=(\"|\')?(\S+)(\'|\")"""
    lists = GETRegList(lines,regx)
    srcLists = ReadPageCache(url,urlid)
    ok = 0
    err = 0
    if srcLists != str(lists) or DBGetRefresh(urlid):    
        for lnk in lists:
            #thread.start_new_thread(LinkThread,(url,lnk))
            ret = LinkThread(url,lnk,urlid)
            if ret==1:
                ok+=1
            else:
                err+=1
            pass
    else:
        print "Not modified, pass"
    if err>0 or ok >0:
        DBWriteLogs(urlid,ok,err)
    WritePageCache(url,urlid,lists)

def LinkThread(url,lnk,urlid):
    """Check one extracted link of page *url*.

    Resolves *lnk* to an absolute URL (skipping javascript: pseudo-links),
    fetches it with a custom User-Agent, and returns 1 on success or 0 on a
    recognised failure. DB error rows are written for HTTP and DNS errors.
    NOTE(review): the skipped/unknown-error paths fall through to the bare
    "return" (None), which the caller counts as a failure -- confirm intended.
    """
    headers = { 'User-Agent' : "KanKanNews PageMonitor V0.1B20120720" }
    aurl = ""
    if CheckJs(lnk):
        # javascript: links etc. are not fetchable; leave aurl empty to skip
        aurl = ""
    elif CheckRelUrl(lnk):
        aurl = urljoin(url, lnk)
        # sequentially apply the replacements: collapse '/../' and strip CR/LF
        aurl = reduce(lambda r,x: r.replace(x[0], x[1]), [('/../', '/'), ('\n', ''), ('\r', '')], aurl)
    else:
        aurl = lnk
    if aurl!="":
        try:
            req = urllib2.Request(aurl, None, headers)
            response = urllib2.urlopen(req)
            print aurl , ":" , str(response.code)
            return 1
        # HTTPError must be caught before URLError (it is a subclass of it)
        except urllib2.HTTPError, e:
            print aurl , ":" , e
            DBWriteErrorLogs(urlid,aurl,str(e))
            return 0
        except urllib2.URLError, e:
            # connection-level failure: logged to stdout only, not to the DB
            print aurl , ":" , e
            return 0
        except socket.gaierror,e:
            # DNS resolution failure: recorded as a page error
            print aurl , ":" , e
            DBWriteErrorLogs(urlid,aurl,str(e))
            return 0
        except httplib.BadStatusLine, e:
            print aurl , ":" , e
            return 0 
        except KeyboardInterrupt:
            print "\r\nKeyboardInterrupt"
            exit()
        except :
            # NOTE(review): bare except silently ignores anything else
            print ""
    return
    
def CheckRelUrl(url):
    """Return 1 if *url* looks relative (must be resolved against its page),
    0 if it is already absolute.

    Bug fix: the original only recognised "http://" as absolute, so https
    links were wrongly treated as relative and mangled by urljoin.
    """
    if url[0:7] == "http://" or url[0:8] == "https://":
        return 0
    return 1

def CheckJs(lnk):
    """Return 1 if *lnk* is not a fetchable URL (a javascript: pseudo-link or
    a JS string-concatenation fragment starting with '+'), else 0.

    Robustness fix: an empty string no longer raises IndexError at lnk[0];
    it is treated as not fetchable (1).
    """
    if not lnk:
        return 1
    if lnk[0:11].lower() == "javascript:":
        return 1
    if lnk[0] == "+":
        return 1
    return 0

def WritePageCache(url, urlid, content):
    """Persist str(*content*) (the scraped link list) to the per-URL cache
    file ./cache/<urlid>_.autocache.html so the next scan can detect changes.

    NOTE(review): *url* is unused -- the cache key is urlid alone. Assumes
    the ./cache directory already exists; TODO confirm it is created on deploy.
    """
    # "with" guarantees the handle is closed even if write() raises
    with open('./cache/' + str(urlid) + '_' + '.autocache.html', 'w') as fileHandle:
        fileHandle.write(str(content))
    return

def ReadPageCache(url, urlid):
    """Return the cached link-list string for *urlid*, or '' when no cache
    file exists yet (first scan of this URL).

    NOTE(review): *url* is unused -- the cache key is urlid alone.
    """
    try:
        # "with" closes the handle even if read() raises (was leaked before)
        with open('./cache/' + str(urlid) + '_' + '.autocache.html') as fileHandle:
            content = fileHandle.read()
    except IOError:
        print('No Cache, skipped')
        content = ''
    return content

def GETRegList(linesList, regx):
    """Extract link targets matching *regx* (a src=/href= pattern) from the
    HTML lines in *linesList*.

    HTML comments are stripped first, then the first match per line is taken.
    All capture groups are screened; the length/'#' filters discard the
    attribute-name and quote groups so only the URL group survives. Returns
    the de-duplicated list of targets, or None when *linesList* is None.
    """
    if linesList is None:
        return None
    # hoisted out of the loop: compile the comment-stripper once, not per line
    reComment = re.compile(r"""<!--([\s\S]*)-->""")
    rtnList = []
    for line in linesList:
        line = reComment.sub('', line)
        matchs = re.search(regx, str(line), re.IGNORECASE)
        if matchs is not None:
            for foundStr in matchs.groups():
                # keep only plausible URLs: non-empty, longer than 4 chars,
                # and not an in-page '#' anchor (checked at positions 0 and 1)
                if foundStr not in rtnList and foundStr is not None and len(foundStr) > 4 and foundStr[0] != '#' and foundStr[1] != '#':
                    foundStr = foundStr.replace("\'", "")
                    rtnList.append(foundStr)
    return rtnList

def ThreadMain():
    """Scheduler loop: every 10 seconds re-read the enabled URLs from the DB
    and spawn one ThreadServe worker thread for each URL that does not have
    a live worker yet (tracked via the shared loadedUrls list)."""
    while 1:
        urls = DBReadURLs()
        threads = []
        for url in urls:
            # skip URLs that already have a running worker; previously a
            # Thread object was constructed here even when it was discarded
            if url['urlid'] in loadedUrls:
                continue
            loadedUrls.append(url['urlid'])
            threads.append(threading.Thread(target=ThreadServe,
                                            args=(url['url'], url['urlid'], url['interval'])))
        for i, t in enumerate(threads):
            print(i)
            t.start()
        time.sleep(10)
    return

def ThreadServe(url, urlid, interval):
    """Worker loop for one URL: scan it every *interval* seconds until the
    row is disabled in the DB, then deregister from loadedUrls and exit."""
    while 1:
        try:
            if DBInitScan(urlid) == 0:
                # URL disabled/removed: deregister so ThreadMain can respawn
                # it later if it gets re-enabled
                print(url + " terminated")
                loadedUrls.remove(urlid)  # simpler than del list[list.index(x)]
                break
            GETUrl(url, urlid)
            print(url + " done")
            time.sleep(interval)
        except KeyboardInterrupt:
            exit()
    return

if __name__ == '__main__':
    # Entry point: print the startup banner, then run the scheduler loop
    # forever; ThreadMain() only returns via an exception.
    try:
        print u'-'
        print u'LastUpdate: 20120620 R1'
        print u'MgrServ Ver 0.1.0'
        #GETUrl("http://www.kankanews.com/")
        ThreadMain()
    except KeyboardInterrupt:
        # Ctrl-C in the main thread: acknowledge and stop the scheduler
        print "\r\nKeyboardInterrupt"
        exit()

