# -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule

from scrapy_jd.items import ScrapyJdItem
from scrapy_jd.utils import *


class JdSpider(CrawlSpider):
    """Crawl jd.com product pages and yield one ``ScrapyJdItem`` per product.

    Entry points are the category/channel home pages listed in
    ``start_requests``; the two crawl rules then follow product-detail
    pages (``item.jd.com/<id>.html``) and category listing pages
    (``list.jd.com/list.html?cat=<id>``) from there.
    """

    name = 'jd'
    allowed_domains = ['jd.com']
    start_urls = ['http://jd.com/']

    rules = (
        # Product detail pages. Dots are escaped so '.' matches a literal
        # dot instead of any character (the original pattern was over-broad).
        Rule(LinkExtractor(allow=r"https://item\.jd\.com/\d+\.html"),
             callback="parse_item", follow=True),
        # Category listing pages. Note the escaped '\?': an unescaped '?'
        # is a regex quantifier (making the preceding 'l' optional) and the
        # original pattern could never match the literal '?' in these URLs.
        Rule(LinkExtractor(allow=r"https://list\.jd\.com/list\.html\?cat=(\d+)"),
             callback="parse_item", follow=True),
    )

    def start_requests(self):
        """Seed the crawl with JD's main category/channel home pages.

        Overrides ``start_urls``-based seeding so multiple storefront
        subdomains are covered in one crawl.
        """
        urls = [
            "https://jiadian.jd.com",
            "https://shouji.jd.com/",
            "https://wt.jd.com",
            "https://shuma.jd.com/",
            "https://diannao.jd.com/",
            "https://bg.jd.com",
            "https://channel.jd.com/home.html",
            "https://channel.jd.com/furniture.html",
            "https://jzjc.jd.com/",
            "https://channel.jd.com/kitchenware.html",
            "https://channel.jd.com/1315-1342.html",
            "https://channel.jd.com/1315-1343.html",
            "https://phat.jd.com/10-156.html",
            "https://channel.jd.com/1315-1345.html",
            "https://beauty.jd.com/",
            "https://channel.jd.com/beauty.html",
            "https://channel.jd.com/pet.html",
            "https://phat.jd.com/10-184.html",
            "https://phat.jd.com/10-183.html",
            "https://channel.jd.com/watch.html",
            "https://channel.jd.com/jewellery.html",
            "https://phat.jd.com/10-185.html",
            "https://phat.jd.com/10-109.html",
            "https://phat.jd.com/10-272.html",
            "https://xinfang.jd.com/",
            "https://car.jd.com/",
            "https://che.jd.com/",
            "https://baby.jd.com",
            "https://toy.jd.com/",
            "https://food.jd.com/",
            "https://jiu.jd.com",
            "https://fresh.jd.com",
            "https://china.jd.com",
            "https://art.jd.com",
            "https://channel.jd.com/1672-2599.html",
            "https://nong.jd.com",
            "https://health.jd.com",
            "https://channel.jd.com/9192-9196.html",
            "https://book.jd.com/",
            "https://mvd.jd.com/",
            "https://education.jd.com",
            "https://e.jd.com/ebook.html",
            "https://jipiao.jd.com/",
            "https://hotel.jd.com/",
            "https://trip.jd.com/",
            "https://ish.jd.com/",
            "https://licai.jd.com/",
            "https://z.jd.com/",
            "https://baitiao.jd.com",
            "https://bao.jd.com/",
            "https://anzhuang.jd.com",
            "https://jdwx.jd.com",
            "https://cleanclean.jd.com",
            "https://2.jd.com/",
            "https://imall.jd.com/",
        ]
        for url in urls:
            yield scrapy.Request(url)

    def parse_item(self, response):
        """Extract one product from a detail page.

        :param response: the downloaded page response
        :return: yields a populated ``ScrapyJdItem``

        All field lookups are defensive: the rules also feed this callback
        listing pages that lack the ``#detail`` block, and the original
        unguarded ``[0]`` / ``.strip()`` chain raised ``IndexError`` /
        ``AttributeError`` there.
        """
        name = response.xpath(
            '//*[@id="detail"]/div[2]/div[1]/div[1]/ul[2]/li[1]/@title').extract_first()
        num = response.xpath(
            '//*[@id="detail"]/div[2]/div[1]/div[1]/ul[2]/li[2]/@title').extract_first()
        url = response.request.url
        brand = response.xpath('//*[@id="parameter-brand"]/li/@title').extract_first()

        # Collapse the whole spec <ul> into one whitespace-normalized line.
        # Guard against pages where the selector matches nothing.
        desc_sel = response.xpath('//*[@id="detail"]/div[2]/div[1]/div[1]/ul[2]')
        if desc_sel:
            desc = (desc_sel[0].xpath("string(.)").extract_first(default="")
                    .strip().replace("\xa0", "").replace("\n", " ").replace("  ", " "))
        else:
            desc = None

        seller = response.xpath(
            '//*[@id="crumb-wrap"]/div/div[2]/div[2]/div[1]/div/a/@title').extract_first()

        # Helpers from scrapy_jd.utils: ware id parsed from the URL, then
        # price / comment stats fetched via JD's side-channel APIs.
        ware_id = get_ware_id(url)
        price = get_price(ware_id)
        comment_num, praise = get_comment_num(ware_id)

        item = ScrapyJdItem()
        item["name"] = name
        item["num"] = num
        item["url"] = url
        item["brand"] = brand
        item["desc"] = desc
        item["seller"] = seller
        item["price"] = price
        item["comment_num"] = comment_num
        item["praise"] = praise

        yield item
