# -*- coding: utf-8 -*-
import random
import scrapy
from scrapy import Request
from cgnpc.rules import *
from zc_core.dao.spu_pool_dao import SpuPoolDao
from zc_core.dao.batch_dao import BatchDao
from zc_core.util.batch_gen import time_to_batch_no
from zc_core.util.done_filter import DoneFilter
from datetime import datetime
from zc_core.spiders.base import BaseSpider


class FullSpider(BaseSpider):
    """Full-catalog spider: re-requests the detail page of every SPU in the
    SPU pool for the current batch, skipping items already collected or
    offline for too long."""

    name = 'full'

    # Product-detail API endpoint; {} is filled with the SPU id.
    item_url = 'https://mall.cgnpc.com.cn/scm-cgn-oauth-web/obs/business/product/managerView/getProduct?productId={}&returnProduct=true&returnSku=true&returnSpec=true&returnSKuSpec=true'

    def __init__(self, batchNo=None, *args, **kwargs):
        """Initialize the spider.

        :param batchNo: optional batch number; when omitted, one is derived
            from the current timestamp via ``time_to_batch_no``.
        """
        super(FullSpider, self).__init__(batchNo=batchNo, *args, **kwargs)
        if not batchNo:
            self.batch_no = time_to_batch_no(datetime.now())
        else:
            self.batch_no = int(batchNo)
        # Create the batch record.
        BatchDao().create_batch(self.batch_no)
        # Filter used to avoid re-collecting SPUs already done in this batch.
        self.done_filter = DoneFilter(self.batch_no, fields={'spuId': 1}, filter_key='spuId')

    def start_requests(self):
        """Yield one product-detail request per SPU that still needs collecting."""
        # Settings are invariant for the whole run; fetch them once here
        # (the original re-fetched them inside the per-SPU loop).
        settings = get_project_settings()
        # NOTE(review): renamed from the misspelled local `while_list`.
        white_list = settings.get("CATALOG_WHITE_LIST")
        # Projection shared by both query branches (was duplicated verbatim).
        fields = {'_id': 1, 'soldCount': 1, 'batchNo': 1, 'offlineTime': 1,
                  'catalog1Id': 1, 'catalog1Name': 1,
                  'catalog2Id': 1, 'catalog2Name': 1,
                  'catalog3Id': 1, 'catalog3Name': 1}
        if white_list:
            pool_list = SpuPoolDao().get_spu_pool_list(
                fields=fields, query={"$or": white_list})
        else:
            pool_list = SpuPoolDao().get_spu_pool_list(fields=fields)
        self.logger.info('全量：%s' % (len(pool_list)))
        # Hoist loop-invariant settings lookups.
        max_offline_time = settings.get('MAX_OFFLINE_TIME', 2)
        force_recover = settings.get('FORCE_RECOVER', False)
        for spu in pool_list:
            spu_id = spu.get('_id')
            offline_time = spu.get('offlineTime', 0)
            # Skip SPUs that have been observed offline too many times.
            if offline_time > max_offline_time:
                self.logger.info('忽略: [%s][%s]', spu_id, offline_time)
                continue
            # Skip SPUs already collected in this batch, unless forced.
            if self.done_filter.contains(spu_id) and not force_recover:
                self.logger.info('已采: [%s]', spu_id)
                continue
            # Request the product detail page, carrying catalog context in meta.
            yield Request(
                url=self.item_url.format(spu_id),
                callback=self.parse_item_data,
                errback=self.error_back,
                priority=260,
                meta={
                    'reqType': 'item',
                    'batchNo': self.batch_no,
                    'spuId': spu_id,
                    'brandModel': spu.get('brandModel'),
                    'catalog1Id': spu.get('catalog1Id'),
                    'catalog1Name': spu.get('catalog1Name'),
                    'catalog2Id': spu.get('catalog2Id'),
                    'catalog2Name': spu.get('catalog2Name'),
                    'catalog3Id': spu.get('catalog3Id'),
                    'catalog3Name': spu.get('catalog3Name'),
                    'soldCount': spu.get('soldCount')
                },
            )

    # Handle the product-detail response.
    def parse_item_data(self, response):
        # Delegates to the module-level parse_item_data (presumably imported
        # via `from cgnpc.rules import *` — the method name does not shadow
        # it inside this body).
        for data in parse_item_data(response):
            self.logger.info('商品: [%s]' % data.get('skuId'))
            yield data
