from concurrent.futures import ThreadPoolExecutor, as_completed, wait
import copy
from v5.Page import Page
import logging
import json
from v5.PageJSONEncoder import PageJSONEncoder

# Configure the root logger once at import time so handlers exist for the
# crawler's output.  NOTE(review): basicConfig at import time is a module
# side effect; acceptable here since this module is the crawler entry point.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(filename)s line:%(lineno)d [%(levelname)s] %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')

# Use a module-specific logger rather than the root logger so records can be
# filtered per module; output still propagates to the root handlers above.
logger = logging.getLogger(__name__)


class Spider(object):
    """Breadth-first multi-threaded crawler.

    Starting from one or more seed requests, each spider module in
    ``spider_module_list`` is applied to the current frontier of pages; the
    child pages it produces become the next frontier.  The pages therefore
    form a multi-way tree (or forest) that is traversed level by level and
    finally serialized to ``pages.json``.
    """

    # Hard safety cap on the number of worker threads.
    MAX_THREADS = 300

    def __init__(self,
                 spider_module_list,
                 thread_num,
                 request_items):
        # Ordered pipeline of modules; module i consumes the pages produced
        # by module i-1.
        self.spider_module_list = spider_module_list
        # Requested upper bound on worker threads (further capped by
        # MAX_THREADS inside thread_pool).
        self.thread_num = thread_num
        # Seed request descriptors used to build the root pages.
        self.request_items = request_items
        # NOTE(review): appears unused — looks like a typo duplicate of
        # spider_module_list; kept for backward compatibility.
        self.spider_modules_list = []

    def spider_thread_execute(self, pages, spider_module):
        """Run ``spider_module`` on each page and collect all child pages.

        Executed inside a worker thread; ``spider_module`` is a private deep
        copy created by ``thread_pool``, so no state is shared across threads.

        :param pages: batch of page objects assigned to this worker
        :param spider_module: module whose ``execute(page)`` populates each
            page's children
        :return: flat list of every child page produced by the batch
        """
        child_pages = []
        for page in pages:
            spider_module.execute(page)
            child_pages.extend(page.get_child_pages())
        return child_pages

    def run(self):
        """Crawl level by level, then dump the resulting page tree.

        Each spider module processes one whole frontier before the next
        module (and thus the next tree level) starts.
        """
        logger.info('初始化爬虫……')
        root_pages = []
        for request_item in self.request_items:
            root_page = Page()
            root_page.set_request(request_item)
            root_pages.append(root_page)

        pages = root_pages
        for spider_module in self.spider_module_list:
            # Lazy %-args avoid building the string when INFO is disabled.
            logger.info('一共处理: %s个页面', len(pages))
            pages = self.thread_pool(pages, spider_module)

        # root_pages still references the whole tree (children are linked in
        # place by the modules), so dumping the roots serializes everything.
        with open('pages.json', 'w', encoding='utf-8') as f:
            json.dump(root_pages, f, indent=4, cls=PageJSONEncoder)

    def thread_pool(self, pages, spider_module):
        """Fan ``pages`` out over a thread pool; return all child pages.

        :param pages: current frontier of page objects
        :param spider_module: module to apply to every page (deep-copied per
            worker so each thread gets private state)
        :return: flat list of the next frontier's pages; ``[]`` for empty input
        :raises: whatever exception a worker raised, with original traceback
        """
        # Guard: ThreadPoolExecutor(max_workers=0) raises ValueError, so an
        # empty frontier must short-circuit instead of crashing.
        if not pages:
            return []

        # Honor the pool size requested at construction time (previously it
        # was ignored), still bounded by MAX_THREADS and the page count.
        limit = self.thread_num or self.MAX_THREADS
        thread_num = min(len(pages), limit, self.MAX_THREADS)

        # One (module copy, page batch) task per worker.
        module_task_list = [[copy.deepcopy(spider_module), []]
                            for _ in range(thread_num)]

        # Distribute the pages round-robin across the batches.
        for i, page in enumerate(pages):
            module_task_list[i % thread_num][1].append(page)

        logger.info('开启线程池，线程数: %s', thread_num)
        child_pages = []
        with ThreadPoolExecutor(max_workers=thread_num) as executor:
            future_list = [
                executor.submit(self.spider_thread_execute, task_pages, module)
                for module, task_pages in module_task_list
            ]
            for future in as_completed(future_list):
                # result() re-raises any worker exception with its original
                # traceback (cleaner than re-raising future.exception()).
                child_pages.extend(future.result())
        return child_pages