# -*- coding: utf-8 -*-
import copy
import random
from zc_core.spiders.base import BaseSpider
from scrapy.utils.project import get_project_settings
from scrapy.exceptions import IgnoreRequest
from zc_core.dao.sku_pool_dao import SkuPoolDao
from zc_core.pipelines.helper.material_code_helper import MaterialCodeHelper
from zc_core.util.batch_gen import time_to_batch_no
from zc_core.util.http_util import retry_request
from zc_core.dao.batch_dao import BatchDao
from zc_core.util.done_filter import DoneFilter

from chinagd.rules import *


class FullSpider(BaseSpider):
    """Full-catalogue spider for neep.shop.

    Re-crawls every SKU in the pool for the current batch:
    ``start_requests`` (item detail) -> ``parse_item_data`` (sales query)
    -> ``parse_sales`` (fill MDM material code, yield the final item).
    """

    name = "full"
    # Item detail endpoint
    item_url = "https://www.neep.shop/rest/service/routing/nouser/qrySKUFromInterService"
    # Sales volume endpoint
    sales_url = 'https://www.neep.shop/rest/service/routing/nouser/qrySaleNumService'
    # MDM enrichment endpoint — only referenced by the disabled parse_mdm
    # path below.
    # NOTE(review): host lacks the "www." prefix the other two URLs use —
    # confirm before re-enabling the MDM request.
    sku_url = 'https://neep.shop/rest/service/routing/nouser/qrySKUService'

    def __init__(self, batchNo=None, *args, **kwargs):
        super(FullSpider, self).__init__(batchNo=batchNo, *args, **kwargs)
        # Register this batch run.
        BatchDao().create_batch(self.batch_no)
        # Tracks SKUs already collected in this batch, to avoid re-crawling.
        self.done_filter = DoneFilter(self.batch_no)
        self.mc_helper = MaterialCodeHelper()

    def start_requests(self):
        """Yield one item-detail POST per SKU still to be collected."""
        pool_list = SkuPoolDao().get_sku_pool_list(
            fields={'_id': 1, 'catalog3Id': 1, 'materialCode': 1, 'supplierId': 1, 'supplierName': 1, 'originPrice': 1,
                    'salePrice': 1, 'batchNo': 1, 'offlineTime': 1})
        self.logger.info('全量：%s' % (len(pool_list)))

        # Settings are loop-invariant: read them once, not once per SKU.
        settings = get_project_settings()
        force_recover = settings.get('FORCE_RECOVER', False)
        max_offline = settings.get('MAX_OFFLINE_TIME', 2)

        # Skip already-collected SKUs unless FORCE_RECOVER is set.
        # BUGFIX: the pre-filter used to drop done SKUs unconditionally,
        # which made a later FORCE_RECOVER check dead code — the setting
        # could never actually force a re-crawl.
        if force_recover:
            dist_list = list(pool_list)
        else:
            dist_list = [x for x in pool_list
                         if not self.done_filter.contains(x.get('_id'))]
        self.logger.info('目标：%s' % (len(dist_list)))
        random.shuffle(dist_list)

        for sku in dist_list:
            sku_id = sku.get("_id")
            supplier_id = sku.get("supplierId")
            supplier_name = sku.get('supplierName')
            catalog3_id = sku.get('catalog3Id')
            sale_price = sku.get('salePrice')
            origin_price = sku.get('originPrice')
            material_code = sku.get('materialCode')

            # Skip SKUs that have been seen offline too many times —
            # avoids wasting requests on dead listings.
            offline_time = sku.get('offlineTime', 0)
            if offline_time > max_offline:
                self.logger.info('忽略: [%s][%s]', sku_id, offline_time)
                continue

            yield scrapy.FormRequest(
                url=self.item_url,
                method='POST',
                # Carry the pool metadata through meta so downstream
                # callbacks can merge it into the parsed item.
                meta={
                    'reqType': 'item',
                    'batchNo': self.batch_no,
                    "skuId": sku_id,
                    'catalog3Id': catalog3_id,
                    'supplierId': supplier_id,
                    'supplierName': supplier_name,
                    'salePrice': sale_price,
                    'originPrice': origin_price,
                    'materialCode': material_code,
                },
                headers={
                    'Content-Type': 'application/x-www-form-urlencoded'
                },
                formdata={
                    'skuId': sku_id,
                    'supplierId': supplier_id
                },
                callback=self.parse_item_data,
                errback=self.error_back,
                priority=50
            )

    def parse_item_data(self, response):
        """Parse the item-detail response; chain a sales-volume request.

        Note: the bare ``parse_item_data(response)`` call below resolves to
        the module-level parser from ``chinagd.rules`` (wildcard import),
        not to this method.
        """
        meta = response.meta
        sku_id = meta.get('skuId')

        item = parse_item_data(response)
        if item:
            supplier_id = item.get('supplierId')
            yield scrapy.FormRequest(
                url=self.sales_url,
                method='POST',
                meta={
                    'reqType': 'price',
                    'batchNo': self.batch_no,
                    'skuId': sku_id,
                    # Shallow copy so the sales callback can mutate its own
                    # item without aliasing surprises.
                    'itemData': copy.copy(item)
                },
                headers={
                    'Content-Type': 'application/x-www-form-urlencoded'
                },
                formdata={
                    'skuId': sku_id,
                    'supplierId': supplier_id
                },
                callback=self.parse_sales,
                errback=self.error_back,
                priority=80
            )
        else:
            # Empty parse result is treated as a delisted SKU.
            self.logger.error('下架: sku=%s' % sku_id)

    def parse_sales(self, response):
        """Parse sales data, fill the MDM material code, yield the item.

        As above, ``parse_sales(response)`` resolves to the module-level
        parser from ``chinagd.rules``.
        """
        meta = response.meta
        sku_id = meta.get('skuId')
        item = parse_sales(response)
        self.mc_helper.fill(item)
        # MDM material code (may still be absent after fill()).
        material_code = item.get('materialCode')
        supplier_id = item.get('supplierId')

        self.logger.info('商品: sku=%s mdm=%s' % (sku_id, material_code))
        yield item

        # NOTE(review): disabled MDM fallback request kept for reference;
        # re-check sku_url's host before re-enabling.
        # if not material_code and sku_id and supplier_id:
        #     yield scrapy.FormRequest(
        #         url=self.sku_url,
        #         method='POST',
        #         meta={
        #             'reqType': 'group',
        #             'batchNo': self.batch_no,
        #             'skuId': sku_id,
        #         },
        #         headers={
        #             'Content-Type': 'application/x-www-form-urlencoded'
        #         },
        #         formdata={
        #             'skuId': sku_id,
        #             'skuLocation': '2',
        #             'supplierId': supplier_id,
        #         },
        #         callback=self.parse_mdm,
        #         errback=self.error_back,
        #         priority=80
        #     )

    # def parse_mdm(self, response):
    #     meta = response.meta
    #     sku_id = meta.get('skuId')
    #     item = parse_mdm(response)
    #     if item:
    #         item['batchNo'] = self.batch_no
    #         self.logger.info('MDM: [%s]' % sku_id)
    #         yield item


