# -*- coding: utf-8 -*-
'''
Created on Jul 11, 2014

@author: LONG HOANG GIANG
'''
import os
import sys
from urlparse import urljoin
import traceback
# sys.path.append('C:/longhoanggiang/pyLib')
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
import lib
import logging
import sqlite3
import json
import workerpool
from lxml import etree

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s :: %(asctime)s :: %(message)s', datefmt='%d/%m/%Y %H:%M:%S')


    
# Shared result map filled concurrently by CrawlJob workers:
# md5(url) -> chapter HTML string, or None when the fetch failed.
# NOTE(review): relies on CPython's GIL for safe per-item assignment
# from multiple worker threads; no explicit lock is taken.
crawlResult = {}

class CrawlJob(workerpool.Job):
    """Worker-pool job wrapping a crawl callback.

    The callback must return a ``(detail, key)`` pair; on completion the
    detail is published into the module-level ``crawlResult`` dict under
    ``key`` (an md5 of the crawled URL, per the callers in this module).
    """

    def __init__(self, callback, *args):
        # Remember the callable and its positional arguments until run().
        self.callback = callback
        self.args = args

    def run(self):
        # Execute the crawl and publish its result for the collector loop.
        detail, key = self.callback(*self.args)
        crawlResult[key] = detail

def createTable(connection):
    """Create the ``udv_content`` table and its indexes if they are absent.

    :param connection: an open :mod:`sqlite3` connection; the caller is
        responsible for committing and closing it.

    All statements use ``IF NOT EXISTS`` so the call is idempotent.
    """
    cursor = connection.cursor()
    try:
        cursor.execute(
            "CREATE TABLE IF NOT EXISTS udv_content("
            "'id' INTEGER PRIMARY KEY AUTOINCREMENT, "
            "'hash' VARCHAR(32), "
            "'name' VARCHAR(200), "
            "'content' BLOB, "
            "'is_header' INTEGER DEFAULT 0)")
        # Speeds up lookups by content hash (md5 of the source URL).
        cursor.execute("CREATE INDEX IF NOT EXISTS hash_index ON udv_content(hash)")
        cursor.execute("CREATE INDEX IF NOT EXISTS id_index ON udv_content(id)")
    finally:
        # Release the cursor even if a statement fails (the original
        # leaked it on error).
        cursor.close()

# Browser session cookies (Incapsula anti-bot + PHPSESSID + GA) captured
# from a logged-in tutorialspoint.com session; passed to lib.Web.load so
# page fetches are accepted by the site.
# NOTE(review): these values are almost certainly expired — refresh from
# a live browser session before crawling.
cookie = '''incap_ses_223_176145=IklQNdy2eACKiLNsiUEYA9yUzlMAAAAATVBymOAcCXrK/oBGES4TBg==; visid_incap_176145=arhlr5jVTkSnV/8cq5Ifc9yUzlMAAAAAQUIPAAAAAACepwJnbyltTOb4urtnvUOx; incap_ses_88_176145=FXisd6a2k2YiOTIQ/aM4AeiUzlMAAAAAJ/mUy7XdJXjAZZwrpTTPtA==; __utma=55973678.1610909839.1406047453.1406047453.1406047453.1; __utmb=55973678; __utmc=55973678; __utmz=55973678.1406047453.1.1.utmccn=(direct)|utmcsr=(direct)|utmcmd=(none); PHPSESSID=3lq402il6fkf8i40vbsuleuft4; __atuvc=1%7C30'''

class Crawler():
    
    def getDetail(self, url):
        try:
            response = lib.Web.load(url, cached=True, cookie=cookie)
            tree = response.build_tree(base_url=url, attr_width='100%')
            nodeContent = tree.xpath("//div[@class='content']")
            if len(nodeContent) > 0:
                nodeContent = nodeContent[0]
                lib.Etree.cleanNode(".//div[@class='pre-btn']", nodeContent)
                lib.Etree.cleanNode(".//div[@class='nxt-btn']", nodeContent)
                lib.Etree.cleanNode(".//div[@class='print-btn']", nodeContent)
                content = lib.Etree.tostring(nodeContent)
                if type(content).__name__ == 'unicode':
                    content = content.encode('utf-8')
                return content, lib.md5(url)
        except:
            print url
            print tree
            traceback.print_exc()
        return None, lib.md5(url)
    
    def getChapters(self, url):
        data = []
        tree = lib.Web.load(url, cached=False, cookie=cookie).build_tree(base_url=url)
        print lib.Etree.tostring(tree)
        print len(tree.xpath("//ul[@class='menu']/li/a"))
        lib.Etree.cleanNodeNextSibling(tree.xpath("//ul[@class='menu']/li[contains(., 'Useful Resources')]/.."), True)
        for node in tree.xpath("//ul[@class='menu']/li/a"):
            name = lib.stringify(node)
            if name == u'': continue
            link = urljoin(url, node.get('href'))
            print name
            data.append({'name': name, 'url': link, 'filename': lib.md5(link)})
        return data

    def execute(self, url, databasename=None):
        if databasename == None:
            databasename = raw_input("Enter database name: ")
        path = "/longhoanggiang/database/{0}".format(databasename)
        if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path), 0777)
        connection = sqlite3.connect(path)
        createTable(connection)
        chapters = self.getChapters(url)
        pool = workerpool.WorkerPool(size=10)
           
        for chapter in chapters:
            pool.put(CrawlJob(self.getDetail, chapter['url']))
        pool.shutdown()
        pool.wait()
           
        for chapter in chapters:
            name = chapter['name']
            if name == 'Android Quick Guide': break
            detail = crawlResult[lib.md5(chapter['url'])]
            if detail == None: continue
            detail = "<h4 id='title'>{0}</h4>{1}".format(name, detail)
            cursor = connection.cursor()
            cursor.execute("INSERT INTO udv_content('name', 'content') VALUES(?, ?)", [unicode(name), buffer(lib.compressStr(json.dumps(detail)))])
            cursor.close()
            print name.decode('iso-8859-1')
            print type(name.decode('iso-8859-1')).__name__
            print detail
               
        connection.commit()
        connection.close()
        logging.info("saved database in {0}".format(path))

if __name__ == '__main__':

    c = Crawler()
    c.execute('http://www.tutorialspoint.com/mongodb/index.htm', 'mongodbtutorial')

    logging.info("Finished")
    # Force-terminate so any lingering worker threads die with the
    # process. Exit status 0 signals success; the original exited with
    # 1, which reads as failure to shells and schedulers.
    os._exit(0)