#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
 @Time    : 2018/5/20 20:47
 @File    : crawler_config.py
 @desc    : Crawler group configuration built from a task-info dict;
            constructs one ItemParser per entry of the task's item_list.
'''
from commonSpiders.creeper.parses.item_parse import ItemParser


class CrawlerGroupConfig(object):
    """Configuration for one crawler task group.

    Extracts identity, scheduling and URL-filter fields from *task_info*
    (all via ``dict.get`` with safe defaults) and builds an ItemParser
    for every entry of its ``item_list``.
    """

    def __init__(self, task_info):
        # Identity of this group within the crawl plan.
        self.group_key = task_info.get('task_id', '')
        self.plan_id = task_info.get('plan_id', '')

        # Spider selection and crawl scope.
        self.spider_key = task_info.get('spider_key', '')
        self.count = task_info.get('count', 0)
        self.start_url = task_info.get('start_url', '')
        self.start_time = task_info.get('start_time', '')
        self.end_time = task_info.get('end_time', '')
        self.allow_urls = task_info.get('allow_urls', '')
        self.deny_urls = task_info.get('deny_urls', '')

        self.settings = task_info.get('settings', {})

        # Parsers are populated from the task's item descriptions.
        self.item_info_parsers = []
        self.set_group_items(task_info.get('item_list', []))

    def set_group_items(self, item_info_list):
        """Append one configured ItemParser per dict in *item_info_list*.

        Anything that is not a list is silently ignored (defensive guard
        against malformed task payloads).
        """
        if not isinstance(item_info_list, list):
            return

        for info in item_info_list:
            item_key = info.get('key', None)
            url_pattern = info.get('html_url_reg', None)
            meta_parsers = info.get('item_meta_parser_list', None)
            ruler = info.get('parse_ruler', 'reg')

            parser = ItemParser(self.group_key, url_pattern,
                                rule_parser_key=ruler,
                                plan_id=self.plan_id, key=item_key)
            parser.init_item_meta_parsers(meta_parsers)
            parser.set_data_source(info.get('data_source', None))
            self.item_info_parsers.append(parser)
