# coding: utf-8

from nyawc.Options import Options
from nyawc.Crawler import Crawler
from nyawc.CrawlerActions import CrawlerActions
from nyawc.http.Request import Request
from nyawc.QueueItem import QueueItem
from UrlFilter import UrlFilter

def cb_crawler_before_start():
    """Announce on stdout that the crawl is beginning (nyawc pre-start hook)."""
    message = "Crawler started."
    print(message)

def cb_crawler_after_finish(queue):
    """Report each finished request and its UrlFilter verdict (nyawc post-crawl hook).

    Args:
        queue: The crawler queue; ``get_all`` presumably returns a mapping of
            queue items keyed by hash — TODO confirm against nyawc docs.
    """
    print("Crawler finished.")
    queue_items = queue.get_all(QueueItem.STATUS_FINISHED)
    # Only the values (queue items) are needed; the keys were discarded before.
    for queue_item in list(queue_items.values()):
        request = queue_item.request
        # Fixed: these were Python 2 `print` statements, a SyntaxError on Python 3
        # even though the rest of this file uses print() calls.
        print(request.url)
        print(UrlFilter(request.url, request.method).check_url())

def cb_request_before_start(queue, queue_item):
    """Log the URL about to be crawled and allow the crawl to continue.

    Args:
        queue: The crawler queue (unused here).
        queue_item: The item whose request is about to start.
    """
    url = queue_item.request.url
    print("Starting: {}".format(url))
    return CrawlerActions.DO_CONTINUE_CRAWLING

def cb_request_after_finish(queue, queue_item, new_queue_items):
    """Log the URL that just finished and allow the crawl to continue.

    Args:
        queue: The crawler queue (unused here).
        queue_item: The item whose request just completed.
        new_queue_items: Items discovered by this request (unused here).
    """
    finished_url = queue_item.request.url
    print("Finished: {}".format(finished_url))
    return CrawlerActions.DO_CONTINUE_CRAWLING

def cb_request_in_thread_after_finish(queue_item, new_requests):
    """Filter out newly discovered requests whose URL looks like a real static
    resource (per UrlFilter), returning only the requests worth crawling.

    Args:
        queue_item: The queue item that produced the new requests (unused here).
        new_requests: Candidate requests discovered during the crawl.

    Returns:
        list: The subset of ``new_requests`` to keep crawling.
    """
    return [
        request
        for request in new_requests
        if not UrlFilter(request.url).is_real_static()
    ]

# Wire the crawler callbacks. Kept at module level so `options` and `u`
# remain importable, matching the original script's public names.
options = Options()

options.callbacks.crawler_before_start = cb_crawler_before_start # Called before the crawler starts crawling. Default is a null route.
options.callbacks.crawler_after_finish = cb_crawler_after_finish # Called after the crawler finished crawling. Default is a null route.
options.callbacks.request_before_start = cb_request_before_start # Called before the crawler starts a new request. Default is a null route.
options.callbacks.request_after_finish = cb_request_after_finish # Called after the crawler finishes a request. Default is a null route.
options.callbacks.request_in_thread_after_finish = cb_request_in_thread_after_finish

#options.performance.max_threads = 1
u = "http://mutepig.club"
#u = "http://127.0.0.1/test.php"

if __name__ == "__main__":
    # Guarded so importing this module no longer kicks off a network crawl
    # as an import-time side effect.
    crawler = Crawler(options)
    crawler.start_with(Request(u))
