#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
 @Time    : 2018/4/15 19:37
 @File    : manager.py
 @desc    :
'''
import datetime
import threading

from scrapy.utils.project import get_project_settings

from commonSpiders.creeper.crawlermanagers.crawler_config import CrawlerGroupConfig
from commonSpiders.creeper.crawlermanagers.custom_crawlerprocess import CustomCrawlerProcess
from commonSpiders.creeper.spiders.commcon_spider import BaseSpider
from commonSpiders.net.extend_context import ContextExtend
from commonSpiders.utils.server_utils import get_guid_by_mac, get_mac_address
from commonSpiders.utils.singleton import singleton

MANAGER_KEY = 'crawler_process_manager'


@singleton
@ContextExtend.extend(MANAGER_KEY)
class CrawlerProcessManager(object):

    INIT_CRAWLER_PROCESS_FLAG = False

    def __init__(self):
        # 设置管理器的settings
        self.settings = get_project_settings()
        # 设置管理器的guid
        self.guid = self.settings.get('CRAWLER_PROCESS_MANAGER', 'default')
        # 获取本机mac地址
        self.mac = get_mac_address()
        # 爬虫进程数量
        self.crawler_process_num = self.settings.get('CRAWLER_PROCEESS_NUM', 1)
        # 爬虫进程dict
        self.process_dict = {}
        #
        self.task_process = {}
        print '初始化爬虫进程管理器'

    def get_crawler_process(self, crawler_process_guid):
        '''
        根据爬虫进程key获取爬虫进程
        :param crawler_process_guid:
        :return:
        '''
        return self.process_dict.get(crawler_process_guid, None)

    def create_crawler_process(self, name, settings):
        '''
        根据爬虫进程key和爬虫设置创建爬虫进程
        :param name:
        :param settings:
        :return:
        '''
        if not name:
            return False
        print '初始化爬虫进程'
        guid = get_guid_by_mac(name)
        process = CustomCrawlerProcess(settings, guid=guid)
        process.config_spider_class("common", BaseSpider)
        threading.Thread(target=self.run_crawler_process, args=(process,)).start()
        self.process_dict.setdefault(guid, process)
        return True

    def _start_craw_manager(self):
        '''
        启动爬虫进程管理器
        :return:
        '''
        for i in range(self.crawler_process_num):
            self.create_crawler_process(i, self.settings)
        self.INIT_CRAWLER_PROCESS_FLAG = True

    @staticmethod
    def run_crawler_process(process):
        '''
        启动爬虫进程
        :param process:
        :return:
        '''
        print '启动爬虫进程[%s][%s]' % (process.guid, datetime.datetime.now())
        process.start(False)

    def extend_start(self):
        '''
        应用扩展初始化函数
        :return:
        '''
        self._start_craw_manager()

    def start_task(self, task_info):
        '''
        开始新任务
        :param settings:
        :param task_info:
        :param item_info:
        :return:
        '''
        crawler_num = task_info.get('crawler_num', 1)

        group_config = CrawlerGroupConfig(task_info)
        self.distribute_crawlers(crawler_num, group_config)

    def distribute_crawlers(self, crawler_num, group_config):

        process_list = self.process_dict.values()
        process_list_len = len(process_list)
        for index in range(crawler_num):
            group_key = group_config.group_key
            process_index = index % process_list_len
            process = process_list[process_index]
            process.add_crawlers_by_spider_key(group_config)
            self.task_process.setdefault(group_key, [])
            self.task_process[group_key].append(process)
