import json
import urllib
from urllib.parse import urljoin

import scrapy
from scrapy import Request
from scrapy_splash import SplashRequest


class YslSpider(scrapy.Spider):
    """Spider for www.ysl.cn.

    Renders the JavaScript-built home page via Splash, follows the
    sub-categories under the '女士' / '男士' / '礼品' navigation sections,
    and pages through each category's product-list JSON API.
    """

    name = 'ysl'
    allowed_domains = ['www.ysl.cn']
    start_urls = ['https://www.ysl.cn']
    # JSON endpoint returning one page of products for a category URL.
    list_product_url = 'https://www.ysl.cn/rest/default/V1/catalog/productListByUrl'
    product_url = 'https://www.ysl.cn/products'

    def start_requests(self):
        """Request the home page through Splash (nav is rendered client-side)."""
        for url in self.start_urls:
            yield SplashRequest(url, self.parse, args={'wait': 3})

    def parse(self, response):
        """Walk the top navigation and request page 1 of every sub-category
        under the whitelisted sections.

        :param response: Splash-rendered home page.
        :yields: ``Request`` objects for ``parse_category``.
        """
        nav_nodes = response.xpath(
            '//*[@id="__layout"]/div/div[3]/div[1]/div[2]/div[2]/ul/li')
        for node in nav_nodes:
            # Read each section's own title instead of zipping two parallel
            # XPath result lists (zip misaligns if counts differ, and a dict
            # of titles silently drops duplicates).
            title = node.xpath('./a/text()').get(default='').strip()
            if title not in ('女士', '男士', '礼品'):
                continue
            # BUG FIX: the original used an absolute XPath here
            # ('//*[@id="__layout"]/.../ul/li[1]/ul/li/a/@href'), which
            # ignores `node` and always returned the FIRST section's links
            # for every matched section. Query relative to this nav node.
            for href in node.xpath('.//ul/li/a/@href').getall():
                # [12:] strips a fixed-length prefix from the href --
                # presumably the scheme + domain; TODO confirm against the
                # live markup.
                params = {
                    'url': href.strip()[12:],
                    'page': '1',
                }
                url = f'{self.list_product_url}/?{urllib.parse.urlencode(params)}'
                yield Request(url=url, callback=self.parse_category)

    def parse_category(self, response):
        """Parse one page of the product-list API and follow pagination.

        :param response: JSON API response for one category page.
        :yields: a ``Request`` for the next page until the last page.
        """
        if response.status != 200:
            return
        result = json.loads(response.text)
        if result.get('status') is not True:
            return
        data = result['data']
        # BUG FIX: compare page counters as ints.  The original compared
        # `currentPage != totalPage` directly, but the later
        # `int(currentPage)` shows the API may return strings, so
        # '1' != 1 would never terminate; `<` also guards against
        # currentPage ever exceeding totalPages.
        current_page = int(data['currentPage'])
        total_pages = int(data['totalPages'])
        if current_page < total_pages:
            params = {
                'url': data['categoryInfo']['url'],
                'page': current_page + 1,
            }
            url = f'{self.list_product_url}/?{urllib.parse.urlencode(params)}'
            yield Request(url=url, callback=self.parse_category)
