'''
Created on 2011-12-27

@author: ajwang
'''
import datetime
import socket
import time
import traceback
from collections import deque
from Queue import Queue
from threading import Condition, Thread
from urlparse import urlparse

from const import CONST
from pagelinks import PageLinks
from urlcontainer import UrlContainer

# Global 5-second timeout for every socket operation so one stalled
# page fetch cannot hang a worker thread indefinitely.
socket.setdefaulttimeout(5.0)


class Worker(Thread):
    """Daemon thread that consumes (func, args, kwargs) tasks from a shared
    queue and executes them until the interpreter exits.

    The thread starts itself on construction and never returns from run();
    ``daemon = True`` lets the process exit while workers are still blocked
    on ``tasks.get()``.
    """

    def __init__(self, tasks, _id):
        Thread.__init__(self)
        self.id = _id        # numeric id, diagnostics only
        self.tasks = tasks   # shared Queue of (func, args, kwargs) tuples
        self.daemon = True   # do not block interpreter shutdown
        self.start()

    def run(self):
        while True:
            func, args, kargs = self.tasks.get()
            try:
                func(*args, **kargs)
            except Exception:
                # A failing task must not kill the worker; report the full
                # traceback (the original printed only the exception value)
                # and keep serving the queue.
                traceback.print_exc()
            finally:
                # Always mark the task done -- even after an exception --
                # so Queue.join() cannot hang on a failed task.
                self.tasks.task_done()

class ThreadPool:
    """Fixed-size pool of daemon Worker threads fed by a bounded queue.

    The queue is bounded to ``num_threads`` entries, so ``add_task``
    blocks when the workers fall behind instead of letting the backlog
    grow without limit.
    """

    def __init__(self, num_threads):
        # BUG FIX: the original assigned the Condition *class* itself
        # (missing call parentheses); callers touching pool.cond would
        # have gotten the class, not a usable condition variable.
        self.cond = Condition()
        self.tasks = Queue(num_threads)
        for worker_id in range(num_threads):
            Worker(self.tasks, worker_id)

    def add_task(self, func, *args, **kargs):
        """Queue ``func(*args, **kargs)``; blocks while the queue is full."""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Block until every queued task has been marked done."""
        self.tasks.join()



class Crawler(object):
    """Breadth-first web crawler fed by one or more seed URLs.

    Discovered URLs are de-duplicated through ``UrlContainer``; fetching
    and link extraction for each page run on a 10-thread pool while this
    (main) thread drains the ``self.seeds`` frontier.
    """

    def __init__(self, *_seeds):
        self.pool = ThreadPool(10)           # 10 fetch/parse workers
        self.cond = Condition()
        self.url_container = UrlContainer()  # de-duplicates seen URLs
        # deque accepts any iterable directly; the original's intermediate
        # list copy was unnecessary.
        self.seeds = deque(_seeds)           # frontier of URLs to crawl

    def get_netloc_path(self, _url):
        """Return ``[netloc, path]`` for *_url*, keeping the query string
        attached to the path (``/a/b?x=1``)."""
        parts = urlparse(_url)
        path = parts.path
        if parts.query != '':
            path = path + '?' + parts.query
        return [parts.netloc, path]

    def _add(self, _base_url):
        """Worker task: fetch *_base_url* and enqueue its unseen links."""
        for link in PageLinks(_base_url).get_html_links():
            if self.url_container.add(link):
                self.seeds.append(link)

    def crawl(self):
        """Drain the frontier until it is empty and all tasks are done."""
        while True:
            try:
                base_url = self.seeds.popleft()
            except IndexError:
                # Frontier momentarily empty: stop only when the workers
                # are idle too (in-flight tasks may still append seeds).
                if self.pool.tasks.unfinished_tasks == 0 and not self.seeds:
                    break
                # BUG FIX: the original busy-spun this loop at 100% CPU
                # while waiting for workers; yield the CPU briefly instead.
                time.sleep(0.1)
                continue
            try:
                self.pool.add_task(self._add, base_url)
            except Exception as e:
                print(e)

        self.pool.wait_completion()
