# # -*- coding: utf-8 -*-
# NOTE(review): this entire module is commented out — the 'full_soldCount'
# spider is disabled. If it is re-enabled, review the dead branches below
# (offline-time filter, done-filter) before uncommenting them.
# import random
# import scrapy
# from scrapy import Request
#
# from zc_core.util.http_util import *
# from crccmall.rules import *
# from zc_core.dao.spu_pool_dao import SpuPoolDao
# from zc_core.dao.batch_dao import BatchDao
# from zc_core.util.batch_gen import time_to_batch_no
# from zc_core.util.done_filter import DoneFilter
# from datetime import datetime
# from zc_core.dao.item_data_dao import ItemDataDao
#
#
# class FullSpider(BaseSpider):
#     name = 'full_soldCount'
#     custom_settings = {
#         'CONCURRENT_REQUESTS': 8,
#         # 'DOWNLOAD_DELAY': 0.1,
#         'CONCURRENT_REQUESTS_PER_DOMAIN': 8,
#         'CONCURRENT_REQUESTS_PER_IP': 8,
#     }
#
#     item_url = 'https://www.crccmall.com/api/merchandise/ecGoods/crccPortal/queryGoodsInfo?uuids={}'
#
#     def __init__(self, batchNo=None, *args, **kwargs):
#         super(FullSpider, self).__init__(*args, **kwargs)
#         if not batchNo:
#             self.batch_no = time_to_batch_no(datetime.now())
#         else:
#             self.batch_no = int(batchNo)
#         # Create the batch record
#         BatchDao().create_batch(self.batch_no)
#         # Avoid re-collecting items already done in this batch
#         self.done_filter = DoneFilter(self.batch_no, fields={'spuId': 1}, filter_key='spuId')
#
#     def start_requests(self):
#         item_list = ItemDataDao().get_batch_data_list(self.batch_no,
#                                                       fields={'_id': 1, 'batchNo': 1,'spuId': 1,'soldCount':1})
#         # item_list = [i for i in item_list if i.get('catalog2Name') in ["办公耗材", "办公设备"]]
#         item_list = [i for i in item_list if i.get('soldCount') is None]
#         self.logger.info('全量：%s' % (len(item_list)))
#         for spu in item_list:
#             sku_id = spu.get('_id')
#             spu_id = spu.get('spuId')
#             # offline_time = spu.get('offlineTime', 0)
#             # settings = get_project_settings()
#             # if offline_time > settings.get('MAX_OFFLINE_TIME', 2):
#             #     self.logger.info('忽略: [%s][%s]', spu_id, offline_time)
#             #     continue
#             # # 避免重复采集
#             # if self.done_filter.contains(spu_id) and not settings.get('FORCE_RECOVER', False):
#             #     self.logger.info('已采: [%s]', spu_id)
#             #     continue
#             # Fetch the product's info
#             yield Request(
#                 url=self.item_url.format(spu_id),
#                 callback=self.parse_item_data,
#                 errback=self.error_back,
#                 priority=260,
#                 meta={
#                     'reqType': 'item',
#                     'batchNo': self.batch_no,
#                     'spuId': spu_id,
#                     'skuId': sku_id
#                 },
#             )
#
#     # Handle the ItemData response
#     def parse_item_data(self, response):
#         # Parse the product detail page
#         # print(response)
#         for data in parse_sold_count(response):
#             self.logger.info('商品: [%s]' % data.get('skuId'))
#             yield data
#

