# -*- coding: utf-8 -*-
import random
from scrapy import Request
from scrapy.exceptions import IgnoreRequest
from scrapy.utils.project import get_project_settings
from zc_core.util.http_util import retry_request
from zc_core.dao.sku_pool_dao import SkuPoolDao
from zc_core.dao.batch_dao import BatchDao
from zc_core.util.batch_gen import time_to_batch_no
from zc_core.util.done_filter import DoneFilter
from liaoning.rules import *
from zc_core.spiders.base import BaseSpider


class FullSpider(BaseSpider):
    """Full-catalog crawl: iterate every sku in the sku pool and request its
    item detail page, skipping skus that are long offline or already collected
    in the current batch.
    """
    name = 'full'
    # URL templates (item detail page, and the paged order-history endpoint).
    item_url = 'http://218.60.151.1:8081/gp-webapp-mall/public/product/view/default?productId={}&sku={}'
    order_url = 'http://218.60.151.1:8081/gp-webapp-mall/public/product/view/load3?productId={}&page={}&_=1569207828299'

    def __init__(self, batchNo=None, *args, **kwargs):
        super(FullSpider, self).__init__(batchNo=batchNo, *args, **kwargs)
        # Create the batch record for this run.
        BatchDao().create_batch(self.batch_no)
        # Dedup filter so skus already collected in this batch are skipped.
        self.done_filter = DoneFilter(self.batch_no)

    def start_requests(self):
        """Yield one item-detail Request per eligible sku in the pool."""
        # cookies = SeleniumLogin().get_cookies()
        # NOTE(review): hard-coded session cookie will expire; presumably the
        # SeleniumLogin path above is the real source — confirm before relying
        # on this spider in production.
        cookies = {'JSESSIONID': 'E64326AD02476C8DF5CDB69C9486DE84'}
        if not cookies:
            self.logger.error('init cookie failed...')
            return
        self.logger.info('init cookie: %s', cookies)

        # Settings are loop-invariant: resolve them once, not once per sku.
        settings = get_project_settings()
        max_offline_time = settings.get('MAX_OFFLINE_TIME', 2)
        force_recover = settings.get('FORCE_RECOVER', False)

        pool_list = SkuPoolDao().get_sku_pool_list(fields={})
        self.logger.info('全量：%s', len(pool_list))
        # Shuffle so repeated runs don't always hammer the same skus first.
        random.shuffle(pool_list)
        for sku in pool_list:
            sku_id = sku.get('_id')
            sold_count = sku.get('soldCount')
            catalog3_id = sku.get('catalog3Id')
            sp_sku_id = sku.get('supplierSkuId')
            offline_time = sku.get('offlineTime', 0)
            # Skip skus that have been offline too many times.
            if offline_time > max_offline_time:
                self.logger.info('忽略: [%s][%s]', sku_id, offline_time)
                continue
            # Skip skus already collected in this batch (unless forced).
            if self.done_filter.contains(sku_id) and not force_recover:
                self.logger.info('已采: [%s]', sku_id)
                continue

            yield Request(
                url=self.item_url.format(sku_id, sp_sku_id),
                callback=self.parse_item_data,
                errback=self.error_back,
                cookies=cookies,
                meta={
                    'reqType': 'item',
                    'batchNo': self.batch_no,
                    'skuId': sku_id,
                    'catalog3Id': catalog3_id,
                    'soldCount': sold_count,
                    'supplierSkuId': sp_sku_id,
                },
            )

    def parse_item_data(self, response):
        """Parse an item detail response and yield the extracted item, or log
        the sku as off-shelf when nothing could be parsed.

        The inner ``parse_item_data(response)`` call resolves to the
        module-level function star-imported from ``liaoning.rules`` — it is
        not a recursive call to this method.
        """
        meta = response.meta
        data = parse_item_data(response)
        if data:
            sku_id = data.get('skuId')
            self.logger.info('商品: [%s]', sku_id)
            yield data
        else:
            self.logger.error('下架: [%s]', meta.get('skuId'))
