#!/usr/bin/env python

__author__ = "Roney Reis"
__license__ = "Public"
__version__ = "0.1"
__email__ = "roney.reis@lia.ufc.br"
__status__ = "Development"
import urllib
import MySQLdb
import logging
from datetime import date
import mechanize


# Absolute path to the properties file holding the database credentials
# (user, password, db name, host) — one 'key=value' entry per line,
# consumed positionally by execute().
pathProperties = "C:/Users/Roney/Documents/eclipseDesenvolvimento/ProjectTCC/src/ufc/tcc.properties"


def donwloadHTML(url):
    """Download the page at *url* and return its raw HTML source as a string.

    NOTE: the (misspelled) public name is kept so existing callers keep
    working.  Raises whatever urllib raises on a failed request.
    """
    sock = urllib.urlopen(url)
    try:
        # try/finally guarantees the socket is released even when read()
        # raises (the original leaked it in that case).
        htmlSource = sock.read()
    finally:
        sock.close()
    return htmlSource

def configureLog(logLevel, logName):
    """Configure the root logger to write to the file *logName*.

    *logLevel* is a case-insensitive standard level name ("debug",
    "INFO", ...).  The log file is truncated on every run (filemode='w').
    """
    logging.basicConfig(
        filename=logName,
        filemode='w',
        level=getattr(logging, logLevel.upper()),
        format='[%(asctime)s] %(levelname)s: %(message)s',
        datefmt='%d-%m-%Y %H:%M:%S',
    )

def isValidUrl(url):
    """Return True if *url* answers to a browser-like request, else False.

    Opens the URL with mechanize using a desktop User-Agent and with
    robots.txt handling disabled, so sites that reject generic crawlers
    still respond.  Any failure (DNS error, HTTP error, 30s timeout, ...)
    is logged and reported as False.
    """
    try:
        br = mechanize.Browser()
        br.addheaders = [('User-agent', "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; AskTB5.6)")]
        br.set_handle_robots(False)
        br.open(url, timeout=30)
        logging.info("URL %s is valid" % url)
        return True
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed; also dropped the unused 'res' local.
        logging.info("Can't check existence of %s" % url)
        return False


def execute(url):
    """Crawl *url* and store its HTML in the MySQL 'page' table.

    Reads database credentials from the module-level properties file,
    validates the URL, downloads the page and inserts (url, html).

    Returns True on a successful insert, False otherwise.
    """
    configureLog("DEBUG", "iCrawler-%s.log" % date.today().isoformat())
    logging.info("Started")

    # PARAMETER PARSING: each properties line is 'key=value'; values are
    # consumed positionally (user, password, db name, host).
    args = {}
    parament = []
    properties = open(pathProperties)
    try:
        logging.info("Open Properties")
        for line in properties:
            i = line.find("=") + 1
            parament.append(line[i:].strip())
    finally:
        # The file handle was leaked in the original.
        properties.close()
    args["u"] = parament[0]  # database user
    args["p"] = parament[1]  # database password
    args["d"] = parament[2]  # database name
    args["l"] = parament[3]  # database host
    logging.info("Arguments: %s" % args)

    if isValidUrl(url):
        # Open db connection
        conn = None
        try:
            conn = MySQLdb.connect(host=args["l"], user=args["u"], passwd=args["p"], db=args["d"], use_unicode=True, charset='utf8')
            cursor = conn.cursor()
        except Exception:
            logging.exception("Can't connect to database")
            # Bug fix: when connect() itself failed, 'conn' was unbound
            # and conn.close() raised NameError inside this handler.
            if conn is not None:
                conn.close()
            return False
        else:
            logging.info("Connect to URL %s", url)
            stringURL = donwloadHTML(url)
            stringURL = stringURL.replace('\n\n', ' ')
            stringURL = stringURL.replace("\'", '')
            stringURL = stringURL.replace('\"', '')
            # Parameterized query: the previous %-string interpolation was
            # open to SQL injection and broke on quotes in the page text.
            query = "INSERT INTO page (url, textHTML) VALUES (%s, %s)"
            cursor.execute(query, (url, stringURL))
            conn.commit()
            logging.info("COMMIT is sucess")
            conn.close()
            return True
    logging.info("Finished")
    return False

if __name__ == "__main__":
    print "Start Crawler"
    aux = execute("https://developers.google.com/storage/sla")
    print "Finish Crawler"
                
