# -*- coding: utf-8 -*-
# @Time    : 2024/04/11 16:23
# @Author  : Mr.su
# @FileName: miumiu.py
# @FileDesc: https://www.miumiu.com/ww/zh.html
from CollectSpiders.settings import LOG_FILE_PATH
from CollectSpiders.toots.methods import make_md5
from CollectSpiders.toots.connects import RedisClient
import scrapy, json, datetime, urllib.parse, logging, copy


# noinspection PyAbstractClass,PyMethodMayBeStatic
# noinspection PyAbstractClass,PyMethodMayBeStatic
class CrawlSpider(scrapy.Spider):
    """Spider for www.miumiu.com new-arrival product listings.

    Listing pages are fetched from the Algolia search API via POST; product
    details come from the Prada-group product-view API (Miu Miu is a Prada
    brand, so the detail endpoint lives on api.prada.com). Product ids are
    deduplicated through a Redis set; pagination stops at the first id that
    was already seen.
    """
    name, domain = 'miumiu', 'www.miumiu.com'
    redisClient = RedisClient()
    # Shared base headers — must never be mutated in place; copy before
    # adding per-request headers (see process_lis).
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36',
    }
    custom_settings = {
        'CONCURRENT_REQUESTS': 2,
        'LOG_FILE': LOG_FILE_PATH, 'LOG_LEVEL': 'WARNING'
    }

    def __init__(self, data=None, *args, **kwargs):
        """Accept an optional crawl task passed in from the scheduler.

        :param data: URL-quoted JSON string describing the columns to crawl;
                     when omitted, a built-in default task (new arrivals) is used.
        :param args: forwarded to scrapy.Spider.
        :param kwargs: forwarded to scrapy.Spider.
        """
        super(CrawlSpider, self).__init__(*args, **kwargs)
        self.data = json.loads(urllib.parse.unquote(data)) if data else {
            "pid": "1000", "status": 1, "wait_time": 3600, "brand": "miumiu",
            "childs": {
                "1001": {
                    "sid": "1001", "label": "新品上市", "upload": 1, "status": 1, "webtype": "shopping",
                    "url": "https://www.miumiu.com/ww/zh/new-arrivals/c/10200EU"
                }
            }
        }

    def start_requests(self):
        """Issue the first Algolia listing request for the configured column."""
        column = list(self.data['childs'].values())[0]
        # TODO 每个栏目都要加一下参数 (per-column request parameters still to be added)
        config_dict = {
            '新品上市': {
                'link': 'https://2nyr2y6a02-3.algolianet.com/1/indexes/*/queries?x-algolia-api-key=fba9d163b6fca7a3efdb2021b7ab9b9d&x-algolia-application-id=2NYR2Y6A02',
                'params': {"requests": [
                    {"indexName": "PLP_MIUMIU_Online_WW",
                     "params": "facetFilters=%5B%5B%22Categories%3A10200EU%22%5D%5D&facets=%5B%22Categories%22%2C%22ColorGroup.zh_CN%22%2C%22MaterialGroup.zh_CN%22%2C%22ProductType%22%2C%22SizeGroup.zh_CN%22%5D&filters=OnlineStartDate%20%3C%3D%201712976915%20AND%20OnlineEndDate%20%3E%3D%201712976915&highlightPostTag=__%2Fais-highlight__&highlightPreTag=__ais-highlight__&hitsPerPage=24&maxValuesPerFacet=100&page={}&ruleContexts=%5B%22R-10200EU-1%22%2C%22R-10200EU-2%22%2C%22R-10200EU-3%22%2C%22R-10200EU-4%22%2C%22R-10200EU-5%22%2C%22R-10200EU-6%22%2C%22R-10200EU-7%22%2C%22R-10200EU-8%22%2C%22R-10200EU-9%22%2C%22R-10200EU-10%22%5D&tagFilters="},
                    {"indexName": "PLP_MIUMIU_Online_WW",
                     "params": "analytics=false&clickAnalytics=false&facets=%5B%22Categories%22%5D&filters=OnlineStartDate%20%3C%3D%201712976915%20AND%20OnlineEndDate%20%3E%3D%201712976915&highlightPostTag=__%2Fais-highlight__&highlightPreTag=__ais-highlight__&hitsPerPage=0&maxValuesPerFacet=100&page=0&ruleContexts=%5B%22R-10200EU-1%22%2C%22R-10200EU-2%22%2C%22R-10200EU-3%22%2C%22R-10200EU-4%22%2C%22R-10200EU-5%22%2C%22R-10200EU-6%22%2C%22R-10200EU-7%22%2C%22R-10200EU-8%22%2C%22R-10200EU-9%22%2C%22R-10200EU-10%22%5D"}]}}
        }
        if column['status'] != 1:
            return
        # Guard: only columns present in config_dict can be crawled; a plain
        # dict lookup would raise KeyError and abort the spider for any
        # not-yet-configured label.
        if column['label'] not in config_dict:
            logging.warning('<{}>: no request config for column label: {}'.format(self.domain, column['label']))
            return
        column_url = config_dict[column['label']]['link']
        # Deep-copy before .format(): the page number is substituted into a
        # template kept pristine in meta['params'] for later pages.
        new_column_params = copy.deepcopy(config_dict[column['label']]['params'])
        new_column_params['requests'][0]['params'] = new_column_params['requests'][0]['params'].format(0)
        yield scrapy.Request(
            column_url, method='POST', body=json.dumps(new_column_params), headers=self.headers,
            callback=self.process_lis, meta={'column': column, 'params': config_dict[column['label']]['params'], 'pg': 0}
        )

    def process_lis(self, response):
        """Parse one Algolia listing page; schedule detail requests and the next page.

        Pagination stops when a product id is already present in the Redis
        'products' set (sadd returns 0) or the page comes back empty.
        """
        js = json.loads(response.text)
        gids = [i['ParentVariant'] for i in js['results'][0]['hits']]

        has_more = True
        for gid in gids:
            # sadd returns 1 only for ids not seen before; hitting a known id
            # means we've reached previously-crawled territory — stop paging.
            result = self.redisClient.conn.sadd('products', gid)
            if not result:
                has_more = False
                break
            # Copy the shared class-level headers before adding the
            # store-specific header; assigning `self.headers` directly would
            # alias and permanently mutate the dict for every request.
            detail_headers = dict(self.headers)
            detail_headers['Wcstoreid'] = '30251'
            detail_url = 'https://api.prada.com/frontend-il-anon/store/productview?partNumbers={}'.format(gid)
            yield scrapy.Request(
                detail_url, headers=detail_headers, callback=self.process_detail,
                meta={'column': response.meta['column']}
            )
        if has_more and gids:
            pg = response.meta['pg'] + 1
            # Re-instantiate the page template from meta (deep copy, since
            # .format() must not touch the shared template).
            new_column_params = copy.deepcopy(response.meta['params'])
            new_column_params['requests'][0]['params'] = new_column_params['requests'][0]['params'].format(pg)
            yield scrapy.Request(
                response.url, method='POST', body=json.dumps(new_column_params), headers=self.headers,
                callback=self.process_lis, meta={'column': response.meta['column'], 'params': response.meta['params'], 'pg': pg}
            )

    def process_detail(self, response):
        """Parse one product-detail response into the pipeline item dict."""
        column = response.meta['column']
        js = json.loads(response.text)
        entry = js['data'][0]['catalogEntryView'][0]
        product = {
            '_id': '',
            'pid': self.data['pid'],
            'sid': column['sid'],
            'status': '0',  # 0: not downloaded  1: downloaded  6: download failed
            'webtype': column['webtype'],
            'url': 'https://www.miumiu.com/' + entry['canonicalUrl'],
            'time': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            'brand': self.data['brand'],
            'image': [img_data['attachmentAssetPath'] for img_data in entry['attachments']],
            'domain': self.domain,
            'label': column['label'],
            'upload': column['upload'],
            'name': entry['name']
        }
        # _id is the md5 of the first image URL; a product with no
        # attachments cannot be identified — skip it instead of raising
        # IndexError and killing the callback.
        if not product['image']:
            logging.warning('<{}>: no attachments, skip: {}'.format(self.domain, product['url']))
            return
        product['_id'] = make_md5(product['image'][0])
        logging.warning('<{}>: 数据id: {}'.format(self.domain, product['_id']))
        yield product
