import json
import re
from datetime import datetime
from scrapy import Request
from crawler.core import BaseSpider
from crawler.utils.func_tools import get_public_item, md5
from spiders.electronic_business.theoutnet.extractors import TheoutnetMainExtractor, TheoutnetExtractor

__all__ = ['TheoutnetMainSpider']


# Initial implementation complete
class TheoutnetMainSpider(BaseSpider):
    """Spider for THE OUTNET (en-hk) 'just in' clothing listings.

    Crawls the listing pages, schedules a detail request per product, and
    yields enriched public items for ``ElectronicBusinessPipeline``.
    """

    name = "electronic_business_main_THEOUTNET"

    redis_key = 'electronic_business_main_THEOUTNET'
    default_origin_url = 'https://www.theoutnet.com/en-hk/shop/just-in/clothing'
    default_proxy_type = 'abroad'
    default_origin_header = {
        'Host': 'www.theoutnet.com',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
        'Accept-Language': 'zh-CN,zh;q=0.9',
    }

    custom_settings = {
        'CONCURRENT_REQUESTS': 20,
        'RETRY_TIMES': 1,
        'ITEM_PIPELINES': {
            'crawler.pipelines.ElectronicBusinessPipeline': 100,
        },
        'DOWNLOADER_MIDDLEWARES': {
            # Proxy
            'crawler.middlewares.proxy.ProxyMiddleware': 200,
            # Custom cookie handling for theoutnet
            'spiders.electronic_business.theoutnet.middlewares.CookiesMiddleware': 300,
        }
    }

    # The page embeds its data as a JSON blob in a <script> tag.
    # Compiled once here instead of on every parse()/parse_detail() call.
    _state_pattern = re.compile('<script>window.state=(.*?)</script>', re.S)

    def _get_request_url(self, task):
        """Return the listing URL from *task*, copying sex/category onto the task.

        Side effect: mutates *task* by setting ``task['sex']`` and
        ``task['category']`` from ``task['info']``.
        """
        info = task['info']
        task['sex'] = info['sex']
        task['category'] = info['category']
        return info['url']

    def parse(self, response, **kwargs):
        """Parse a listing page.

        Yields one detail-page ``Request`` per product and, on the first
        page only, one ``Request`` per remaining listing page.
        """
        self.logger.info(f'Request End, URL: {response.url}')
        task = response.meta['task']
        json_code = self._state_pattern.findall(response.text)
        if not json_code:
            # No embedded state: nothing to extract (e.g. blocked/empty page).
            return
        if response.meta.get('more_page'):
            # Follow-up pages must not re-schedule pagination requests.
            total_page = 0
        else:
            total_page = json.loads(json_code[0])["plp"]["listing"]["response"]["body"]["totalPages"]
        for info_dict in TheoutnetMainExtractor.get_item(response, json_code[0]):
            yield Request(
                url=info_dict['url'],
                headers=self.default_origin_header,
                meta={
                    'task': task,
                    'info_dict': info_dict,
                    'proxy_type': self.default_proxy_type
                },
                callback=self.parse_detail,
                dont_filter=True
            )
        # Pages are numbered 1..total_page and page 1 is the current response.
        # BUGFIX: the upper bound was exclusive (range(2, total_page)), which
        # silently skipped the last listing page.
        for page in range(2, total_page + 1):
            yield Request(
                url=response.request.url + f"?pageNumber={page}",
                headers=self.default_origin_header,
                meta={
                    'task': task,
                    'more_page': True,
                    'proxy_type': self.default_proxy_type
                },
                callback=self.parse
            )

    def parse_detail(self, response):
        """Parse a product detail page and yield the assembled public item.

        Raises:
            ValueError: if the embedded ``window.state`` JSON is missing,
                so the retry/error machinery can see the failure.
        """
        self.logger.info(f'Request End, URL: {response.url}')
        task = response.meta['task']
        json_code = self._state_pattern.findall(response.text)
        if not json_code:
            # BUGFIX: bare `raise ValueError` carried no diagnostic context.
            raise ValueError(f'window.state not found in detail page: {response.url}')
        color, product_des, image_list, composition, size_stock_list = TheoutnetExtractor.get_item(
            response,
            json_code[0])
        info_dict = response.meta['info_dict']
        info_dict['color'] = color
        # F/R/E/D are the site's fixed image-angle codes (front/rear/etc. —
        # assumed from URL convention, TODO confirm).
        info_dict['atlas'] = [f'http://www.theoutnet.com/variants/images/{info_dict["spu_id"]}/{_}/w1020_q80.jpg'
                              for _ in ['F', 'R', 'E', 'D']]
        info_dict['size_stock'] = size_stock_list
        info_dict['composition'] = composition
        info_dict['product_des'] = product_des
        info_dict['origin'] = 'theoutnet_HKG'
        info_dict['md5_value'] = md5(info_dict['spu_id'])
        info_dict['sync'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        item = get_public_item(task, table_name='shedana_data')
        item['item'] = info_dict
        yield item
