'''
Created on 2014-3-3

@author: Seanna
'''
from fetchers.FetcherFactory import *
from Logger import *
from threadWorker import *
from PageFetcher import *

# Module-level registry mapping stripped host keys to stripped controller
# values; written via Assigner.set_controller_map and read via
# Assigner.get_controller.
host_controller_map = {}

class Assigner:
    """Distributes crawl work across a thread pool.

    Spawns ``pool_size`` worker tasks (one per pool slot), each running
    :meth:`execute`, which repeatedly pops URLs from the shared module-level
    ``urls_to_visit`` list and fetches/analyses them.

    TODO(review): consider how/when to destroy the pool on abnormal exit.
    """

    # Single lock shared by ALL worker threads, guarding urls_to_visit.
    # BUGFIX: the original execute() created a fresh Lock per thread, which
    # provided no mutual exclusion at all between workers.
    _urls_lock = threading.Lock()

    def __init__(self):
        # Worker pool; size comes from configuration ('pool_size').
        self.__pool__ = ThreadPool(int(Configurator.get_val('pool_size')), 100)
        # Kept for backward compatibility with any external users (unused here).
        self.__mutex__ = threading.Lock()

    @staticmethod
    def set_controller_map(key, value):
        """Register a stripped key -> stripped value pair in the global host map."""
        global host_controller_map
        host_controller_map[key.strip()] = value.strip()

    @staticmethod
    def get_controller(key):
        """Return the controller registered for ``key`` (stripped).

        Raises KeyError if the key was never registered.
        """
        global host_controller_map
        return host_controller_map[key.strip()]

    def start(self):
        """Queue one execute() task per pool slot, then block until all finish."""
        logger.info("Main thread joining...")

        for _ in range(int(Configurator.get_val('pool_size'))):
            self.__pool__.add_task(Assigner.execute, Assigner.validate)

        self.__pool__.join()
        # All worker tasks have stopped, so release the pool resources.
        logger.info("Main thread destroy thread pool...")
        self.__pool__.destroy()

    @staticmethod
    def validate(**kws):
        """Validation hook installed on each fetcher; currently accepts every URL.

        The commented-out logic restricted crawling to a single netloc.
        """
        # url = kws['u']
        # netloc = kws['nloc']
        # return urlparse.urlsplit(url).netloc == netloc  # restrict the visit in one website
        return True

    @staticmethod
    def execute(*args, **kwds):
        """Worker loop: pop URLs from ``urls_to_visit`` and fetch/analyse them.

        ``args[0]`` must be the validation callable to install on each fetcher.
        The loop exits after ``loop_time`` consecutive empty polls, backing
        off 1, 2, 3, ... seconds between polls.
        """
        import time
        import traceback

        global urls_to_visit
        validate = args[0]
        emp_count = 0
        loop_limit = int(Configurator.get_val('loop_time'))
        while emp_count < loop_limit:
            url = None
            # Shared class-level lock so concurrent workers do not race on
            # the shared list (the original per-thread Lock was ineffective).
            with Assigner._urls_lock:
                if len(urls_to_visit) > 0:
                    url = urls_to_visit.pop()  # take one url entry from the list

            if url is None:
                emp_count = emp_count + 1
                logger.info("sleep %d times to wait for urls_to_visit to be filled" % (emp_count))
                time.sleep(emp_count)  # linear back-off before polling again
                continue

            fetcher = FetcherFactory().getFetcher(url)
            if fetcher is not None:
                try:
                    fetcher.add_validation_rule(validate)  # here validate always return true
                    if fetcher.fetch():
                        fetcher.analysis()
                    del fetcher
                except Exception:
                    # Log the full traceback but keep the worker alive so one
                    # bad URL cannot kill the thread.
                    logger.error("Exception throwed!!::")
                    logger.error(traceback.format_exc())
                    logger.error("Encounter exception when analysis: %s " % url)
                    continue

        logger.info("Thread finished to exit")
    
def feed(urls):
    """Append every URL in *urls* to the shared ``urls_to_visit`` work list.

    NOTE(review): workers pop from this list under a lock, but feeding here
    is unsynchronized — presumably called before the workers start; confirm.
    """
    logger.info("begin to feed urls %s" % (urls))
    # extend() mutates the existing list in place, so no `global` statement
    # is needed (the original declared `global` redundantly for reads/mutation).
    urls_to_visit.extend(urls)
        
def start():
    """Module-level entry point: build an Assigner and run the crawl to completion."""
    logger.info("spider moving ...")
    # Fixed local-name typo ('assiner'); `global logger` was redundant for a read.
    assigner = Assigner()
    assigner.start()
