#! /usr/bin/env python
# -*- coding: utf-8 -*-

from vavava import util,workshop,workqueue
from vavava import httpclient
import dbhelper_cl
import urlparse
class FetcherCl:
    """Fetch one category listing page and insert every linked item into the DB.

    Listing pages are assumed to be GBK-encoded HTML (default, overridable)
    containing at least MIN_ITEMS entries, each with an href, a title and a
    date cell.
    """

    # A listing page with fewer matches than this is treated as a parse
    # failure (regex broken or page blocked) rather than a short page.
    MIN_ITEMS = 40
    # Captures each item's href, title text and date cell from the listing HTML.
    ITEM_RE = r'<td[^<]*>[^<]*<h3>\s*<a [^>]*href="(?P<url>[^"]*)"[^>]*>(?P<title>[^<]*)</a>\s*</h3>.*?<div[^<]*>(?P<date>[^<]*)</div>'

    def __init__(self, url="", logger=None, proxy=None, charset="gbk"):
        """proxy/charset keep their historical defaults when not given."""
        self.__logger = util.LogAdapter(logger)
        self.url = url
        self.__http = httpclient.HttpClient()
        # Default to the local proxy the original code hard-coded.
        if proxy is None:
            proxy = {"http": "http://127.0.0.1:8087"}
        self.__http.AddProxy(proxy)
        self.__charset = charset

    def set_logger(self, logger):
        """Swap the logger (workers re-point it at their own log)."""
        self.__logger = util.LogAdapter(logger)

    def get(self, url, category, db):
        """Download *url*, parse its items and insert each into *db*.

        category -- category index stored with every row.
        Returns silently on HTTP failure; raises ValueError when fewer than
        MIN_ITEMS items are parsed (likely regex/page breakage).
        """
        self.url = url
        urlbase = urlparse.urlparse(url)
        try:
            data = self.__http.Get(url)
        except Exception as e:
            # Best-effort: a single failed page should not kill the worker.
            self.__logger.debug("http request failed: %s", e)
            return
        html = data.decode(self.__charset, "ignore")
        items = util.reg_helper(html, self.ITEM_RE)
        if len(items) < self.MIN_ITEMS:
            # Dump the page so the regex can be debugged, then fail loudly.
            # Explicit exception instead of `assert`, which `-O` would strip.
            self.__logger.warning("%s\n%s", url, html)
            raise ValueError("only %d items parsed from %s" % (len(items), url))
        for item in items:
            # item is (href, title, date) per the three regex groups;
            # the loop variable no longer shadows the `url` parameter.
            item_url = urlparse.urljoin("http://%s/" % urlbase.hostname, item[0])
            url_row = dbhelper_cl.DBRowUrl(
                desc       = item[1],
                # relies on the module-level `import time` (present below)
                track_time = time.strptime(item[2], "%Y-%m-%d"),
                url        = item_url,
                category   = category
            )
            if db.insert(url_row):
                self.__logger.info("New item:%s", url_row.desc)

class FecthWork(workqueue.Work):
    """One queued unit of work: fetch a single listing URL into the database."""

    def __init__(self, url, category, dbpath="fetcher_cl.db3", log=None):
        workqueue.Work.__init__(self, log=log)
        self.url = url
        self.category = category
        self.dbpath = dbpath
        self.fetcher = FetcherCl(logger=log)

    def do(self, worker):
        """Run the fetch; failures are logged, never propagated to the queue."""
        try:
            self.log.debug("Parsing:%s", self.url)
            # Re-point the fetcher at this worker's logger before running.
            self.fetcher.set_logger(self.log)
            database = dbhelper_cl.DBUrl(path=self.dbpath, logger=self.log)
            self.fetcher.get(self.url, self.category, database)
        except Exception as e:
            self.log.exception(e)

import time
def go(seed_urls, ws, logger=None, pages=30):
    """Queue *pages* pages per seed URL onto *ws* and block until drained.

    seed_urls -- printf-style URL templates, each taking one page number.
    ws        -- a workshop.WorkShop; StartWork() is called here.
    logger    -- optional; wrapped in LogAdapter so None is safe (the old
                 code crashed on logger.debug when logger was None).
    pages     -- pages to enqueue per seed (previously hard-coded 30).
    """
    log = util.LogAdapter(logger)
    ws.StartWork()
    for category_index, seed_url in enumerate(seed_urls):
        for page in range(pages):
            ws.QueueWork(FecthWork(url=seed_url % page,
                                   category=category_index, log=logger))
    # Poll once a second until every queued work item has been picked up.
    while True:
        info = ws.GetInfo()
        log.debug("[ws_info] %d,%d", info.prepared_work_size, info.buffered_work_size)
        if info.prepared_work_size == 0:
            break
        time.sleep(1)

def test(seed_urls, logger=None):
    """Single-threaded smoke test: fetch page 1 of each seed into test.db3."""
    db = dbhelper_cl.DBUrl(path="test.db3", logger=logger)
    for category_index, seed_url in enumerate(seed_urls):
        page_url = seed_url % 1
        fetcher = FetcherCl(url=page_url, logger=logger)
        fetcher.get(url=page_url, category=category_index, db=db)

def run_spider(deepth=5, db_path="fetcher_cl.db3"):
    """Crawl *deepth* pages of every seed in url_list.urls into *db_path*.

    Runs until interrupted with Ctrl-C, then stops the workshop with a
    30-second grace period.
    """
    logger = util.initlog("fetcher_cl.log")
    logger.debug('fetcher_cl(%f):start fetcher,deepth=%d', util._interval_timer(), deepth)
    # Pre-bind ws: the except clause below referenced it, so any failure
    # before WorkShop() was constructed raised NameError instead of the
    # real error.
    ws = None
    try:
        # Close the seed file promptly instead of leaking the handle.
        with open("url_list.urls") as url_list:
            seed_urls = url_list.readlines()
        ws = workshop.WorkShop(maxsize=10, log=logger)
        ws.StartWork()
        for category_index, seed_url in enumerate(seed_urls):
            for i in range(deepth):
                url = seed_url % (i + 1)
                logger.debug("add item:%s", url)
                ws.QueueWork(FecthWork(url=url, category=category_index,
                                       dbpath=db_path, log=logger))
        # Report progress forever; the loop is left via KeyboardInterrupt.
        while True:
            info = ws.GetInfo()
            logger.debug("[ws_info] %d,%d", info.prepared_work_size, info.buffered_work_size)
            time.sleep(1)
    except KeyboardInterrupt:
        logger.debug('main _thread(%f):stop fetcher', util._interval_timer())
        if ws:
            ws.StopWork(30)
        logger.debug('main _thread(%f):stopped fetcher', util._interval_timer())
def run_checker(key="",db_path="fetcher_cl.db3"):
    print "try to find '%s'" % key
    db = dbhelper_cl.DBUrl(path=db_path)
    result_set,total = db.query_by_desc(key=key)
    if result_set is None or len(result_set) == 0:
        print "no recode found for %s" % key
        return
    pos = 0
    for result in result_set:
        pos += 1
        print "[%d|%s] %s ==== %s" %( pos, time.strftime("%m/%d/%Y %X",result.create_time), result.desc, result.url )

def foo(argv):
    if len(argv) > 1:
        if argv[1] == '-q':
            run_checker(key = sys.argv[2], db_path = argv[3] )
            return
        elif argv[1] == '-r':
            run_spider( deepth=int(argv[2]), db_path=argv[3] )
        else:
            print """
            %s -q keyword dbpath
            %s -r deepth  dbpath
            """

if __name__ == "__main__":
    # Script entry point: dispatch on the command-line flags (-q / -r).
    import sys
    foo(sys.argv)

