# -*- coding: utf-8 -*-
import json
import math
from datetime import datetime, timedelta
from dateutil import tz
import scrapy
from scrapy.exceptions import IgnoreRequest
from scrapy.utils.project import get_project_settings
from zc_core.util.http_util import retry_request
from zc_core.model.items import Box
from zc_core.util.common import parse_date
from energyahead.rules import *
from energyahead.utils.login import SeleniumLogin
from zc_core.spiders.base import BaseSpider


class OrderSpider(BaseSpider):
    """Crawl purchase orders (with their detail rows) from the eportal
    routing service.

    Strategy: log in once (cookies + token via SeleniumLogin), then slide a
    ``date_delta``-day createTime window backwards over the configured
    deadline period. For each window the first page is requested in
    ``start_requests``; ``parse_order_item`` reads the total page count from
    that response and fans out the remaining pages.
    """

    name = 'order'
    # 覆盖配置 — throttle hard; the portal is sensitive to request rate.
    custom_settings = {
        'DOWNLOAD_DELAY': 20
    }
    # 常用链接 — single gateway endpoint that routes all REST service calls.
    routing_url = 'https://eportal.energyahead.com/rest/service/routing'

    def __init__(self, batchNo=None, *args, **kwargs):
        super(OrderSpider, self).__init__(batchNo=batchNo, *args, **kwargs)
        # Records requested per page.
        self.page_size = 500
        # The service appears to serve at most 10000 records per query
        # window, hence this cap on the number of useful pages.
        self.page_limit = 10000 // self.page_size
        # Width of each createTime query window, in days.
        self.date_delta = 3

    def _query_bo(self, page, start_str, end_str):
        """Return the QueryPurchaseOrdersPageListReqBO payload for one page
        of the createTime window [start_str, end_str] (both 'YYYY-MM-DD')."""
        return {
            'pageNo': page,
            'pageSize': self.page_size,
            'sn': '',
            'supplierName': '',
            'orderStatusStr': '',
            'dataSource': '',
            'isThird': '1',
            'createUserName': '',
            'procResultName': '',
            'createTimeStart': start_str,
            'createTimeEnd': end_str,
            'supplierConfirmTimeStart': '',
            'supplierConfirmTimeEnd': '',
            'cancelTimeStart': '',
            'cancelTimeEnd': '',
            'draftTimeStart': '',
            'draftTimeEnd': '',
            'purchaseMethod': '',
            'procGrade': '',
            'procOrigin': '',
            'procResultType': '',
            'procPurpose': '',
            'procType': '',
            'procApproach': '',
            'regionalCorporationName': '',
            'controlOrgCode': '',
            'itemLevel': '',
            'frameAgreementCode': '',
            'frameAgreementDetailsCode': '',
            'itemCategoryName': '',
            'refItemId': '',
            'skuName': '',
            'agentOrgName': '',
            'consigneeCompanyName': '',
            'procResultId': '',
            'procProposalId': '',
            'worksheetType': '',
            'isMine': '2'
        }

    def _page_request(self, page, start_str, end_str):
        """Build the POST request for one page of the order list.

        The window bounds travel in ``meta`` so the callback can log them
        and reuse them when scheduling follow-up pages.
        """
        return scrapy.FormRequest(
            method='POST',
            url=self.routing_url,
            formdata={
                'service': 'getOrdersAndDetailsPageList',
                'token': self.token,
                'QueryPurchaseOrdersPageListReqBO': json.dumps(
                    self._query_bo(page, start_str, end_str))
            },
            meta={
                'batchNo': self.batch_no,
                'page': page,
                'start': start_str,
                'end': end_str,
            },
            cookies=self.cookies,
            callback=self.parse_order_item,
            errback=self.error_back,
            dont_filter=True
        )

    def start_requests(self):
        """Log in, then yield the first-page request of every date window."""
        login = SeleniumLogin()
        cookie_info = login.get_cookies()
        self.cookies = cookie_info.get('cookies', {})
        self.account = cookie_info.get('account', '')
        if not self.cookies or not self.account:
            self.logger.error('init cookie failed...')
            return
        self.token = self.account.get('token', '')
        if not self.token:
            self.logger.error('init token failed...')
            return

        self.logger.info('init cookie: %s', self.cookies)
        self.logger.info('init token: %s', self.token)

        # 截止日期 — how far back to crawl, configurable via settings.
        settings = get_project_settings()
        deadline_days = settings.getint('ORDER_DEADLINE_DAYS', 120)
        start_str = (datetime.now() - timedelta(days=deadline_days)).strftime('%Y-%m-%d')

        # Slide the window forward until it reaches "now".
        # NOTE(review): this compares parse_date()'s result with a
        # zone-aware now(); parse_date must return an aware datetime or the
        # comparison raises TypeError — confirm against zc_core.
        while parse_date(start_str) < datetime.now(tz.gettz('Asia/Shanghai')):
            end_date = parse_date(start_str) + timedelta(days=self.date_delta)
            end_str = end_date.strftime('%Y-%m-%d')
            # 请求页数 — page 1 only; parse_order_item fans out the rest.
            yield self._page_request(1, start_str, end_str)
            start_str = end_str

    # 处理sku列表
    def parse_order_item(self, response):
        """Parse one page of the order list; emit item/json boxes and, on
        the first page of a window, schedule the remaining pages."""
        meta = response.meta
        cur_page = meta.get('page')
        start = meta.get('start')
        end = meta.get('end')

        # 本页商品 — NOTE: this calls the module-level parse_order_item()
        # imported from energyahead.rules via wildcard; the method name
        # shadows it only at class scope, not inside this body.
        item_list, json_list, no_match_order_list = parse_order_item(response)
        self.logger.info('清单: time=<%s, %s>, page=%s, json=%s, order=%s, no_match=%s' % (
            start, end, cur_page, len(json_list), len(item_list), len(no_match_order_list)))
        if item_list:
            yield Box('order_item', self.batch_no, item_list)
        if no_match_order_list:
            yield Box('no_match_order_item', self.batch_no, no_match_order_list)
        if json_list:
            yield Box('order_json', self.batch_no, json_list)
        if not item_list and not json_list:
            self.logger.info('分页为空: time=<%s, %s>, page=%s' % (start, end, cur_page))

        # 分页查询 — fan out follow-up pages exactly once, from page 1.
        if cur_page == 1:
            total = parse_order_total_page(response)
            if total:
                if total > self.page_limit:
                    # Pages past page_limit exceed the service's apparent
                    # 10000-record window; they are still requested (original
                    # behaviour) — shrink date_delta to avoid losing data.
                    self.logger.info('页数超限: start=%s, end=%s, total=%s' % (start, end, total))
                self.logger.info('页数: start=%s, end=%s, total=%s' % (start, end, total))
                # 请求页数 — remaining pages of this window.
                for page in range(2, total + 1):
                    yield self._page_request(page, start, end)