# -*- coding: utf-8 -*-
# Category spider (分类爬虫): crawls the hqchip.com category tree and product data

from scrapy import Spider, Request
from pyquery import PyQuery as pq
from ..items import *

import random
import time
import json


class DlxcSpider(Spider):
    """Category spider for hqchip.com.

    Crawls the full category index, then each category's listing pages,
    and finally every product page, yielding CatItem, PackageItem,
    ManufacturerItem, ProductItem, SupplierItem, StockItem, PriceItem and
    ParamItem objects (declared in ..items).
    """

    name = "dlxc"
    allowed_domains = ["hqchip.com"]
    start_urls = ["http://www.hqchip.com/glist/all_cate.html",]
    base_url = "http://www.hqchip.com%s"
    i = 0  # request counter consumed by delay_time()

    def parse(self, response):
        """Parse the category index page.

        Yields CatItems for the whole category tree (via parse_cat_item)
        and follows every top-level and sub-category link to parse_cat().
        """
        time.sleep(random.random())
        self.delay_time()

        if "all_cate" in self.start_urls[0]:
            for o_cat in self.parse_cat_item(response):
                yield o_cat

        for dl in pq(response.text)(".main-list dl"):
            # Top-level category link; guard against a missing href, which
            # would otherwise build the bogus URL "http://www.hqchip.comNone".
            href = pq(dl).find("h2 a").attr("href")
            if href:
                yield Request(self.base_url % href, callback=self.parse_cat)

            # Sub-category links.
            for dd in pq(dl).find("dd a"):
                dd_href = pq(dd).attr("href")
                if dd_href:
                    yield Request(self.base_url % dd_href,
                            callback=self.parse_cat)
    # ------------------------------------------ End def parse()

    def parse_cat(self, response):
        """Parse one category listing page.

        Yields: pagination Requests, PackageItem / ManufacturerItem facet
        entries (first page only), and product-page Requests carrying the
        category slug in meta['cat_slug'].
        """
        time.sleep(random.random())
        self.delay_time()
        text = pq(response.text)

        # Next page
        for page in text(".page-next"):
            page_href = pq(page).attr("href")
            if page_href:  # last page has no href
                yield Request(self.base_url % page_href,
                        callback=self.parse_cat)

        # Filter selectors: a plain category URL (no "_") is the first,
        # unfiltered page and carries the facet lists (package on the
        # left, manufacturer on the right).
        if "_" not in response.url:
            p_name = response.url.split("/")[-1]
            dul = text(".filter-sel-area ul li")
            if len(dul) == 2:
                for a0 in pq(dul[0]).find("a"):
                    yield PackageItem(name=pq(a0).text(), cat_slug=p_name)

                for a1 in pq(dul[1]).find("a"):
                    yield ManufacturerItem(name=pq(a1).text(), cat_slug=p_name)

        else:
            # Filtered / paginated URL: the slug is everything before
            # the first underscore.
            p_name = response.url.split("/")[-1].split('_')[0]

        # Product links
        for pd in text("table.filter-list-res div.g-text-overflow a"):
            yield Request(self.base_url % pq(pd).attr("href"),
                    meta={'cat_slug': p_name}, callback=self.parse_product)
    # ------------------------------------------ End def parse_cat()

    def parse_product(self, response):
        """Parse a product page.

        Yields a ProductItem, then per supplier a SupplierItem plus, from
        the embedded "#numbers_list" JSON blob, StockItems, PriceItems and
        a Request for the product's parameter table (parse_param_item).
        """
        time.sleep(random.random())
        self.delay_time()
        text = pq(response.text)
        _json = json.loads(text("#numbers_list").val())

        model = text("h1").text()
        dtt = text(".dt-des").text().split(" - ")
        yield ProductItem(model=model,
                cat_slug=response.meta['cat_slug'],
                parent_model=dtt[-1] if len(dtt) == 2 else "",
                url=response.url)

        for spuu in text("ul.dt-select-supp li"):
            sli = pq(spuu)
            # The supplier key is stored in the <li>'s class attribute.
            spuu_name = sli.attr("class")

            # logo URL is embedded in an inline style like
            # "background:url(/path/logo.png)" — strip the trailing ")".
            yield SupplierItem(model=model,
                    name=spuu_name,
                    logo=self.base_url % sli.find("em").attr("style")[:-1].split("(")[-1])

            # Empty-dict defaults: the old chained .get() raised
            # AttributeError when the supplier key or "list" was missing.
            for spuu_json in _json.get(spuu_name, {}).get("list", {}).values():
                goods_id = str(spuu_json.get("goods_id"))

                yield StockItem(supplier=spuu_name,
                        model=model,
                        manufacturer=spuu_json.get("provider_name"),
                        goods_id=goods_id,
                        quantity=str(spuu_json.get("goods_number")),
                        minimum=str(spuu_json.get("min_buynum")))

                # "json_price" may be absent (None) — iterate an empty
                # list instead of raising TypeError.
                for price in spuu_json.get("json_price") or []:
                    try:
                        yield PriceItem(goods_id=goods_id,
                            quantity=str(price[0]),
                            hk_price=str(price[2]),
                            internal_price=str(price[3]))
                    except (IndexError, TypeError):
                        # Skip malformed price rows (fewer than 4 columns);
                        # was a bare except that hid all errors.
                        pass

                attr_url = "/product/ajaxattr.html?goods_id=%s" % goods_id
                yield Request(self.base_url % attr_url,
                        callback=self.parse_param_item)

    # ------------------------------------------ End def parse_product()

    def parse_cat_item(self, response):
        """Yield a CatItem for every top-level and sub-category on the
        "all_cate" index page; sub-categories carry parent_slug."""
        for dl in pq(response.text)(".main-list dl"):
            ph2 = pq(dl).find("h2 a")
            ph2_url = ph2.attr("href")

            pcat = CatItem(name=ph2.text(),
                    slug=ph2_url[1:],  # drop the leading "/"
                    url=self.base_url % ph2_url)

            yield pcat

            for dd in pq(dl).find("dd a"):
                dd_url = pq(dd).attr("href")
                cat = CatItem(name=pq(dd).text(),
                        slug=dd_url.split("/")[-1],
                        parent_slug=pcat.get('slug', ""),
                        url=self.base_url % dd_url)

                yield cat
    # ------------------------------------------ End def parse_cat_item()

    def parse_param_item(self, response):
        """Parse the AJAX parameter endpoint; the goods_id is recovered
        from the request URL's query string."""
        g_id = response.url.split("=")[-1]
        j = json.loads(response.text)
        return ParamItem(goods_id=str(g_id), param=j.get("msg").replace('"', '\\"'))

    # ------------------------------------------ End def parse_param_item()

    def delay_time(self):
        """Throttle: after every ~500 requests sleep 10-17 minutes, then
        reset the counter. Called once per parsed response."""
        if self.i > 500:
            time.sleep(random.randint(600, 1000))
            self.i = 0
        else:
            self.i += 1

    # ------------------------------------------ End def delay_time()
# -------------------------------------------------- End class DlxcSpider


# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4
