# -*- coding: utf-8 -*-
import hashlib

import scrapy

from scrapy import Request
from scrapy.crawler import CrawlerProcess
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose
from scrapy.utils.project import get_project_settings
from urllib import parse

from jingdong.items import JingdongItem

class JdhomeSpider(scrapy.Spider):
    """Crawl jd.com: home page -> category pages -> paginated product
    listings -> individual product detail pages."""

    name = 'jdhome'
    allowed_domains = ['jd.com']
    start_urls = ['https://www.jd.com/']

    @staticmethod
    def _with_page(url, page):
        """Return *url* with its ``page`` query parameter set to *page*.

        Replaces any existing ``page`` value instead of blindly appending
        ``'&page=N'`` (the original approach accumulated one extra
        ``&page=`` parameter per pagination hop).
        """
        scheme, netloc, path, query, fragment = parse.urlsplit(url)
        params = [(k, v) for k, v in parse.parse_qsl(query) if k != 'page']
        params.append(('page', str(page)))
        return parse.urlunsplit(
            (scheme, netloc, path, parse.urlencode(params), fragment))

    def parse(self, response):
        """Follow every top-level category link on the JD home page."""
        for href in response.xpath(
                '//li[@class="cate_menu_item"]/a/@href').extract():
            yield Request(parse.urljoin(response.url, href),
                          callback=self.parse_category)

    def parse_category(self, response):
        """Follow every sub-category link; listings start at page 1."""
        for href in response.xpath(
                '//li[@class="title-name"]/a/@href').extract():
            yield Request(parse.urljoin(response.url, href),
                          callback=self.parse_product,
                          meta={"page_no": 1})

    def parse_product(self, response):
        """Yield a request per product on this listing page, plus one for
        the next listing page until the pager's total page count is reached.
        """
        product_links = response.xpath(
            '//li[@class="gl-item"]/div/div[1]/a/@href').extract()
        page_no = response.meta["page_no"]
        if page_no == 1:
            # The total page count is only visible on the first page's
            # "x / N" pager; it is carried forward in request meta after that.
            fp_text = response.xpath(
                '//span[@class="fp-text"]/i/text()').extract()
            end_page = int(fp_text[0]) if fp_text else 1
        else:
            end_page = response.meta["end_page"]
        self.logger.info("listing page %s of %s: %s",
                         page_no, end_page, response.url)
        # Request the *next* page. The original re-requested the current
        # page (page=page_no while meta already held page_no + 1) and used
        # <= so the final page was fetched twice.
        if page_no < end_page:
            yield Request(self._with_page(response.url, page_no + 1),
                          callback=self.parse_product,
                          meta={"page_no": page_no + 1, "end_page": end_page})
        for href in product_links:
            yield Request(parse.urljoin(response.url, href),
                          callback=self.parse_item)

    def parse_item(self, response):
        """Scrape one product detail page into a JingdongItem."""
        item_loader = ItemLoader(item=JingdongItem(), response=response)
        url = str(response.url)
        item_loader.add_value('url', url)
        # MD5 of the URL serves as a stable dedup key downstream.
        item_loader.add_value('url_md5', hashlib.md5(url.encode('utf8')).hexdigest())
        item_loader.add_xpath('product_name', '//div[@class="sku-name"]/text()',
                              MapCompose(str.strip, str.title))
        # NOTE(review): assumes the price div's text is a bare number —
        # MapCompose(float) drops any value that fails to parse; confirm.
        item_loader.add_xpath('price', '//div[starts-with(@class, "price")]/text()',
                              MapCompose(float))
        # Bug fix: original wrote [class="name"] (a child-*element* test that
        # never matches HTML); an attribute test needs the @ axis.
        item_loader.add_xpath('shop_name', '//div[@class="name"]/a/text()',
                              MapCompose(str.strip))
        return item_loader.load_item()


if __name__ == '__main__':
    # Run this spider standalone with the project's Scrapy settings.
    crawler = CrawlerProcess(get_project_settings())
    crawler.crawl(JdhomeSpider.name)
    crawler.start()
