# -*- coding: utf-8 -*-
'''
Created on Sep 6, 2014

@author: TRAM ANH
'''
import os
import sys
from urlparse import urljoin
import workerpool
# sys.path.append('C:/longhoanggiang/pyLib')
sys.path.append(os.path.join(os.path.dirname(__file__), '../'))
import lib
import logging
import sqlite3
import json

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s :: %(asctime)s :: %(message)s', datefmt='%d/%m/%Y %H:%M:%S')

class Crawler():
    """Scrape stories from truyen.com, fetch every chapter, and store the
    compressed chapter bodies in a local SQLite database.

    All page fetches go through the project-local ``lib.Web.load`` helper
    with ``cached=True`` — presumably a disk-backed HTTP cache, so the
    ``workerpool`` pre-fetch passes below warm the cache and the later
    sequential parsing passes hit it (TODO confirm against ``lib``).
    """
    
    def load(self, item):
        # Fetch a single page purely for its cache side effect; the result
        # is discarded. Used as the work function for the workerpool passes.
        # `item` is a dict with at least a 'url' key.
        lib.Web.load(item['url'], cached=True)
    
    def getChapters(self, url, storyName):
        """Return the chapter list of one story as dicts of
        ``{'name': ..., 'url': ...}``.

        If the story page has no chapter links, the story page itself is
        treated as a single-chapter story.
        """
        tree = lib.Web.load(url, cached=True).build_tree()
        data = []
        # Site-specific XPath into the chapter table; brittle by nature —
        # breaks if truyen.com changes its layout.
        chapterNodes = tree.xpath("//div[@align='center']/div/div[3]/div[1]/div[4]/div[2]/table//fieldset//a")
        if len(chapterNodes) == 0:
            # No chapter links: the story page IS the only chapter.
            data.append({'name': storyName, 'url': url})
        for chapterNode in chapterNodes:
            name = lib.stringify(chapterNode)
            link = chapterNode.get('href')
            if link == '': continue
            # Chapter hrefs may be relative; resolve against the story URL.
            link = urljoin(url, link)
            if name != storyName:
                # Prefix with the story title unless the link text already
                # equals it (avoids "Story - Story").
                name = "{0} - {1}".format(storyName, name)
            data.append({'name': name, 'url': link})
        return data
    
    def getDetail(self, url, name):
        """Fetch one chapter page and return its body as an HTML fragment
        headed by the chapter name."""
        tree = lib.Web.load(url, cached=True).build_tree()
        # The chapter text lives in a single div with a fixed id.
        contentNode = tree.xpath("//div[@align='center']//div[@id='id8r63']")[0]
        # Strip embedded tables (ads / navigation) from the content node.
        lib.Etree.cleanNode(".//table", contentNode)
#         del contentNode.attrib['style']
#         del contentNode.attrib['id']
        content = lib.Etree.tostring(contentNode)
        return "<strong>{0}</strong><br /><hr /><br />{1}".format(name, content)
    
    def createTable(self, connection):
        """Create the ``udv_content`` table and its indexes if missing.

        NOTE(review): the 'hash', 'name' and 'is_header' columns are defined
        here but never populated by this script — presumably consumed by a
        downstream reader; confirm before removing.
        """
        cursor = connection.cursor()
        cursor.execute("CREATE TABLE IF NOT EXISTS udv_content('id' INTEGER PRIMARY KEY AUTOINCREMENT, 'hash' VARCHAR(32), 'name' VARCHAR(200), 'content' BLOB, 'is_header' INTEGER DEFAULT 0)")
        cursor.execute("CREATE INDEX IF NOT EXISTS hash_index ON udv_content(hash)")
        cursor.execute("CREATE INDEX IF NOT EXISTS id_index ON udv_content(id)")
        cursor.close()
    
    def getStories(self, startPage, endPage):
        """Crawl category pages ``startPage``..``endPage`` (inclusive),
        collect every story's chapters, and write the compressed chapter
        HTML into a fresh SQLite file named after the page range.
        """
        dataPath = "/longhoanggiang/database/tnllmvn{0}{1}".format(startPage, endPage)
        # Always start from an empty database for this page range.
        if os.path.exists(dataPath):
            os.unlink(dataPath)
        connection = sqlite3.connect(dataPath)
        self.createTable(connection)
        data = []
        for page in range(startPage, endPage + 1):
            url = "http://truyen.com/truyen/?func=main&cat=29&page={0}".format(page)
            tree = lib.Web.load(url, cached=True).build_tree()
            # Story links sit in the first column of the listing table,
            # skipping the header row (position()>1).
            for node in tree.xpath("//div[@align='center']/div/div[3]//table[2]//tr[position()>1]/td[1]/a"):
                name = lib.stringify(node)
                link = node.get('href')
                if link == '': continue
                link = urljoin(url, link)
                data.append({'name': name, 'url': link})
        
        #preload for cache
        # Warm the HTTP cache with 5 parallel workers so the sequential
        # getChapters() pass below is served from cache.
        pool = workerpool.WorkerPool(size = 5)
        pool.map(self.load, data)
        pool.shutdown()
        pool.wait()
        
        chapters = []
        for item in data:
            for chap in self.getChapters(item['url'], item['name']):
                chapters.append(chap)
                
        #preload for cache
        # Same warm-up trick for the chapter pages before getDetail().
        pool = workerpool.WorkerPool(size = 5)
        pool.map(self.load, chapters)
        pool.shutdown()
        pool.wait()
        
        for chapter in chapters:
            detail = self.getDetail(chapter['url'], chapter['name'])
            name = chapter['name']
            # Leftover debug prints (Python 2 statements).
            print type(name).__name__
            print name
            print detail[:200]
            print type(detail).__name__
            cursor = connection.cursor()
#             cursor.execute("INSERT INTO udv_content('name', 'content') VALUES(?, ?)", [unicode(chapter['name'], 'utf-8'), buffer(lib.compressStr(json.dumps(detail)))])
            # Only 'content' is stored: the row is JSON-encoded, compressed
            # by the project helper, and wrapped in buffer() for a BLOB bind.
            # NOTE(review): decode('cp1252') assumes lib.Web returned
            # cp1252-encoded bytes — verify; the commented line above
            # suggests an earlier utf-8 attempt failed.
            cursor.execute("INSERT INTO udv_content('content') VALUES(?)", [buffer(lib.compressStr(json.dumps(detail.decode('cp1252'))))])
            cursor.close()
            
            
        # Single commit at the end: all-or-nothing for the whole crawl.
        connection.commit()
        connection.close()
                

if __name__ == '__main__':
    
    # Crawl the first two category pages into a fresh SQLite database.
    c = Crawler()
    c.getStories(1, 2)
    
    logging.info("Finished")
    # os._exit (rather than sys.exit) kills the process immediately so any
    # lingering non-daemon workerpool threads cannot keep it alive.
    # Exit with status 0: the run succeeded — the previous value of 1
    # wrongly reported failure to the calling shell / scheduler.
    os._exit(0)
                