import time
from queue import Empty
from threading import Thread
from Spider.core.Parse import Parser
from Spider.core.UrlManage import FifoQueue
from Spider.core.Storage import SaveData
from Spider.core.Download import Downloader
from Spider.util.MyFuncClass import HttpRequest, Item
from Spider.Exception.exceptions import ObjectTypeError, FunctionNotFound


class Scheduler(object):
    """Coordinate the crawl pipeline.

    Wires together the four collaborators: seeds initial requests into the
    URL queue, runs the downloader, and dispatches each downloaded response
    to its parser callback, pushing new requests back onto the queue and
    inserting parsed items into storage.
    """

    def __init__(self):
        self.download = Downloader()   # fetches queued requests, fills success_response_queue
        self.queue = FifoQueue()       # pending-request queue (redis-backed, per comments below)
        self.parse = Parser()          # holds parse callbacks and cookie handling
        self.save = SaveData()         # persistence layer for parsed Items

    def engine(self):
        """Run one full crawl: cookies -> seed URLs -> download -> parse/store -> stats."""
        # NOTE(review): GET_COOKIE_URL is neither defined nor imported in this
        # file, so this line raises NameError unless the name is provided
        # elsewhere — TODO confirm where it comes from.
        self.parse.get_cookies(GET_COOKIE_URL)
        # Seed the queue with the first page of every city.
        t1 = Thread(target=self.handle_first_url)
        t1.start()
        # Consume downloaded pages from success_response_queue: parse and store.
        t2 = Thread(target=self.handle_success_response)
        t2.start()
        # Pull pending requests from the redis queue, download them, and push
        # responses into the response queue (runs on the current thread).
        self.download.run_downloader()
        # Join BOTH workers. The seeding thread was previously never joined,
        # so engine() could report final stats while seeding was unfinished.
        t1.join()
        t2.join()
        self.end()

    def run(self):
        """Public entry point: short grace delay, then start the engine."""
        time.sleep(3)
        print('爬虫开始运行')
        self.engine()

    def end(self):
        """Print final crawl statistics from the queue's dedup filter."""
        number_dict = self.queue.filter.get_number()
        print('爬取页面总数量：' + str(number_dict.get('crawl_number')))
        print('爬取成功页面数量：' + str(number_dict.get('success_number')))
        print('爬取失败页面数量：' + str(number_dict.get('fail_number')))

    def handle_first_url(self):
        """Push every seed request yielded by the parser onto the queue."""
        # A plain for-loop replaces the manual next()/StopIteration dance —
        # iteration ends exactly where the generator is exhausted, as before.
        for obj in self.parse.parse_page():
            self.queue.push(obj)
        print('所有城市名称解析完成')

    def handle_success_response(self):
        """Drain downloaded responses, dispatching each to its parser callback.

        For every response, look up ``response.callback`` by name on the
        Parser instance and iterate what it yields:

        * HttpRequest -> pushed back onto the URL queue,
        * Item        -> inserted into storage,
        * falsy value -> skipped,
        * anything else -> ObjectTypeError.

        Exits when the response queue stays empty for 20 seconds.

        Raises:
            FunctionNotFound: the named callback is missing from Parser, or
                exists but is not callable (previously a non-callable
                attribute was silently ignored and the response dropped).
            ObjectTypeError: a callback yielded an unsupported object.
        """
        while True:
            try:
                response = self.download.success_response_queue.get(timeout=20)
                if response is None:
                    continue
            except Empty:
                # Queue has been idle for the full timeout: assume drained.
                print('已无待解析数据')
                break
            callback_name = response.callback.__name__
            callback = getattr(self.parse, callback_name, None)
            if not callable(callback):
                raise FunctionNotFound(f'{callback_name} is not found in Parser')
            for obj in callback(response):
                if not obj:
                    continue  # falsy yields are an explicit "nothing to do"
                if isinstance(obj, HttpRequest):
                    self.queue.push(obj)
                elif isinstance(obj, Item):
                    # Persist the parsed record.
                    self.save.do_insert(obj)
                else:
                    raise ObjectTypeError('Parse must yield Item or HttpRequest or False')
        print('爬虫关闭')


if __name__ == '__main__':
    # Dropped the unused `from multiprocessing import Process` import — it was
    # never referenced anywhere in this file.
    s = Scheduler()
    # NOTE(review): the entry point is intentionally left disabled; uncomment
    # to actually start crawling.
    # s.run()
