'''
Created on 2012-2-3

@author: xiaokan
'''
from xiaokan.CrawlingQueue import CrawlingQueue

class Crawler(object):
    """Breadth-first web crawler driven by a CrawlingQueue frontier.

    Usage: configure limits via the setters, seed the frontier with
    feed_starter_addrs(), then call crawl().
    """

    def __init__(self):
        # Frontier of addresses still to be crawled.
        self.queue = CrawlingQueue()
        # crawl() stops once the queue has seen this many pages.
        # Defaults to 0, so crawl() is a no-op until a limit is set.
        self.max_number_of_pages = 0
        # Intended worker-thread cap; not yet consulted by crawl().
        self.max_number_of_threads = 0

    def set_max_number_of_pages(self, _max_number_of_pages):
        """Set the page-count budget at which crawl() terminates."""
        self.max_number_of_pages = _max_number_of_pages

    def set_max_number_of_threads(self, _max_number_of_threads):
        """Set the (currently unused) worker-thread limit."""
        self.max_number_of_threads = _max_number_of_threads

    def feed_starter_addrs(self, addrs):
        """Seed the crawl frontier with an iterable of starting addresses."""
        for addr in addrs:
            self.queue.enqueue(addr)

    def crawl(self):
        """Drain the queue breadth-first until the page budget is reached.

        NOTE(review): assumes CrawlingQueue.dequeue() returns None when no
        node is currently available, and that dequeued nodes expose .links
        and .depth -- confirm against CrawlingQueue. If the queue empties
        permanently before the budget is hit, this busy-waits forever;
        presumably another thread is expected to keep feeding it.
        """
        q = self.queue
        while q.count < self.max_number_of_pages:
            # Busy-wait until a node becomes available. (Original intent
            # per its comment: "if unavailable, keep getting".)
            head = None
            while head is None:
                print("Dequeueing")
                head = q.dequeue()

            # Enqueue all outgoing links one level deeper, stopping as
            # soon as the page budget is exhausted mid-page.
            for link in head.links:
                q.enqueue(link, _depth=head.depth + 1)
                if q.count >= self.max_number_of_pages:
                    break