# -*- coding: utf-8 -*-
import scrapy
import json
from scrapy.loader import ItemLoader
from scrapy import Request
from scrapy.loader.processors import Join,MapCompose
import re
from bookcrawl.items import BookItem,LinkItem
# Precompiled patterns, hoisted to module level so they compile once.
# NOTE: raw strings fix the invalid escape sequence "\-" that the original
# non-raw literals triggered (DeprecationWarning / SyntaxWarning in py3.6+).
# ISBN row, e.g. "ISBN:978-7-115-42802-8"; accepts ASCII ':' and full-width '：'.
re_isbn = re.compile(r"[iI][sS][bB][nN][:：]([0-9\-]+)")
# Star rating rendered as an inline CSS width, e.g. "width:90%".
re_star = re.compile(r"width:([0-9]+)")
# Product id: last URL path segment before a literal ".html"
# (dot now escaped — the original "." matched any character).
re_id = re.compile(r"/([^/]+)\.html")

# Recommendation feed ("customers also bought/viewed") for a product id.
also_url = "http://product.dangdang.com/index.php?r=callback%2Frecommend&productId={id}&shopId=0&pageType=publish&module=&isBroad=true"
# Canonical product detail page for a product id.
product_url = "http://product.dangdang.com/{pid}.html"
class DangdangSpider(scrapy.Spider):
    """Crawl book metadata from dangdang.com product pages.

    Seeds from a fixed list of product detail pages, emits a BookItem per
    page, then follows each page's "also bought" / "also viewed"
    recommendation feed to discover related books, emitting a LinkItem for
    every parent -> child recommendation edge.
    """
    name = 'dangdang'
    allowed_domains = ['book.dangdang.com', "product.dangdang.com"]
    # Seed product pages.  Requests built from start_urls are issued with
    # dont_filter=True, so literal duplicates are crawled twice; the two
    # duplicate entries of the original list (1268023699, 1370351023) were
    # removed.
    start_urls = ["http://product.dangdang.com/26909680.html",
                  "http://product.dangdang.com/1196766925.html",
                  "http://product.dangdang.com/1380728359.html",
                  "http://product.dangdang.com/1359991447.html",
                  "http://product.dangdang.com/25259304.html",
                  "http://product.dangdang.com/25289406.html",
                  "http://product.dangdang.com/24047407.html",
                  "http://product.dangdang.com/24030562.html",
                  "http://product.dangdang.com/24034002.html",
                  "http://product.dangdang.com/23643238.html",
                  "http://product.dangdang.com/25273831.html",
                  "http://product.dangdang.com/24021172.html",
                  "http://product.dangdang.com/23608680.html",
                  "http://product.dangdang.com/23761145.html",
                  "http://product.dangdang.com/1318968646.html",
                  "http://product.dangdang.com/26509352.html",
                  "http://product.dangdang.com/26912981.html",
                  "http://product.dangdang.com/26514290.html",
                  "http://product.dangdang.com/1268023699.html",
                  "http://product.dangdang.com/1176862114.html",
                  "http://product.dangdang.com/26481042.html",
                  "http://product.dangdang.com/26923464.html",
                  "http://product.dangdang.com/25350060.html",
                  "http://product.dangdang.com/1237507259.html",
                  "http://product.dangdang.com/1228221116.html",
                  "http://product.dangdang.com/26439903.html",
                  "http://product.dangdang.com/26910177.html",
                  "http://product.dangdang.com/26509260.html",
                  "http://product.dangdang.com/26316890.html",
                  "http://product.dangdang.com/1000202889.html",
                  "http://product.dangdang.com/1032591972.html",
                  "http://product.dangdang.com/1370351023.html",
                  "http://product.dangdang.com/26445826.html",
                  "http://product.dangdang.com/1075531797.html",
                  "http://product.dangdang.com/26922742.html",
                  "http://product.dangdang.com/26923332.html",
                  "http://product.dangdang.com/25205960.html",
                  "http://product.dangdang.com/26435639.html",
                  "http://product.dangdang.com/25307878.html",
                  "http://product.dangdang.com/1295886969.html",
                  "http://product.dangdang.com/1281646669.html",
                  "http://product.dangdang.com/23727542.html",
                  "http://product.dangdang.com/1394758640.html",
                  "http://product.dangdang.com/22925934.html",
                  "http://product.dangdang.com/26317584.html",
                  "http://product.dangdang.com/9314536.html",
                  "http://product.dangdang.com/26482572.html",
                  "http://product.dangdang.com/25267310.html",
                  ]
    # Be polite to the site: serialize requests and pace them 1s apart.
    custom_settings = {
        "DOWNLOAD_DELAY": 1,
        "CONCURRENT_REQUESTS_PER_DOMAIN": 1
    }

    def parse(self, response):
        """Parse one product detail page.

        Yields:
            BookItem with the book's metadata.
            LinkItem when this page was reached through a recommendation
                feed (``response.meta`` carries ``relation`` and the parent
                product id under ``pid``).
            Request for this product's recommendation feed (``parse_list``).
        """
        loader = ItemLoader(item=BookItem(), response=response)
        loader.default_input_processor = MapCompose(str)
        loader.default_output_processor = Join(" ")

        # The ISBN is buried in one of the free-text <li> rows of the
        # "detail describe" box; scan every row for the pattern.
        for row in response.xpath('//*[@id="detail_describe"]/ul/li/text()').extract():
            isbn_match = re_isbn.search(row)
            if isbn_match is not None:
                loader.add_value("isbn", isbn_match.group(1))

        loader.add_xpath("title", '//*[@class="name_info"]/h1/@title')
        loader.add_xpath("author", '//*[@id="author"]/a/text()')
        loader.add_xpath("press", '//a[@dd_name="出版社"]/text()')

        # Star rating is rendered as an inline CSS width, e.g. "width:90%".
        # extract() always returns a list, so the original isinstance check
        # was redundant.
        style = "".join(response.xpath('//*[@class="star"]/@style').extract())
        star_match = re_star.search(style)
        if star_match is not None:
            loader.add_value("star", star_match.group(1))

        # Guard against pages that lack the price node: the original
        # unconditional extract()[0] raised IndexError and lost the item.
        price_nodes = response.xpath('//*[@id="dd-price"]/text()[2]').extract()
        if price_nodes:
            loader.add_value("price", price_nodes[0].strip())
        loader.add_value("field", "dangdang")
        loader.add_value("link", response.url)

        # Product id is the last URL path segment (".../<pid>.html").
        # Guard the match: the original .group(1) on a failed match raised
        # AttributeError.
        id_match = re_id.search(response.url)
        pid = id_match.group(1) if id_match is not None else None
        if pid is not None:
            loader.add_value("pid", pid)

        yield loader.load_item()

        # Reached via a recommendation feed: record the parent -> child edge.
        relation = response.meta.get("relation")
        if relation is not None:
            rloader = ItemLoader(item=LinkItem())
            rloader.default_input_processor = MapCompose(str)
            rloader.default_output_processor = Join(" ")

            rloader.add_value("relation", relation)
            rloader.add_value("pid", response.meta["pid"])  # parent product id
            rloader.add_value("link_pid", pid)              # this page's id
            yield rloader.load_item()

        if pid is not None:
            yield Request(also_url.format(id=pid), callback=self.parse_list,
                          meta={"id": pid})

    def parse_list(self, response):
        """Parse the JSON recommendation feed for one product.

        ``response.meta["id"]`` is the product id the feed belongs to (the
        parent of every recommendation edge).  Yields a Request to
        ``parse`` for each recommended product; relation code 1 means
        "also bought", 2 means "also viewed".
        """
        # response.text replaces the deprecated body_as_unicode().
        payload = json.loads(response.text)
        # Harden against missing keys: an absent feed yields no requests
        # instead of killing the callback with a KeyError.
        data = payload.get("data", {})
        parent_pid = response.meta["id"]

        feeds = (
            (data.get("alsoBuy", {}).get("list", []), 1),
            (data.get("alsoView", {}).get("list", []), 2),
        )
        for entries, relation in feeds:
            for entry in entries:
                child_pid = entry["productId"]
                # BUGFIX: the original passed the *child* id as meta["pid"],
                # so every LinkItem ended up with pid == link_pid; pass the
                # parent id so the recorded edge is parent -> child.
                yield Request(product_url.format(pid=child_pid),
                              callback=self.parse,
                              meta={"pid": parent_pid, "relation": relation})
