import scrapy
import redis
import json
import re
import random

from jd.items import JDItem
from jd.settings import CATEGORY
from jd.model.phone import Phone

class jdphone(scrapy.Spider) :
    """Spider that crawls JD.com phone product pages.

    Walks a category's listing pages, follows every product detail page,
    extracts sku metadata into a JDItem, then fetches the product's comment
    summary. Sku ids already visited in this run are deduplicated through a
    Redis set (``redis_key``); color/size sibling skus found on a detail page
    are enqueued as well.
    """
    name = 'phone'

    # Redis connection used for the per-run sku dedup set.
    redis_host = '****'
    redis_port = 6006
    redis_key = 'skuids'

    # Comment-summary endpoint; the sku id is appended to productId=.
    basecommenturl = "https://sclub.jd.com/comment/productPageComments.action?score=0&sortType=5&page=0&pageSize=10&isShadowSku=0&fold=1&productId="

    def __init__(self, cat, full) :
        """
        :param cat: category index (0..4), looked up in settings.CATEGORY
        :param full: 1 = re-crawl everything, 0 = skip skus already stored
        """
        super().__init__()  # let scrapy.Spider run its own initialisation
        self.cat = int(cat)
        self.full = int(full)
        # Start every run from a clean dedup set.
        self.getRedis().delete(self.redis_key)

    def start_requests(self) :
        """Yield the first listing page for the configured category, if any."""
        print(CATEGORY.get(self.cat))
        if CATEGORY.get(self.cat) is None :
            return
        url = "https://list.jd.com/list.html?cat=%s&page=1&delivery=1&stock=0&sort=sort_totalsales15_desc&trans=1&JL=4_2_0#J_main" % (CATEGORY.get(self.cat))
        yield scrapy.Request(url = url, callback = self.parse_page)

    def parse_page(self, response) :
        """Collect product links on one listing page and follow pagination."""
        self.logger.info(response.url)
        links = response.xpath('//div[@class="gl-i-wrap j-sku-item"]/div[@class="p-img"]/a/@href').extract()
        for link in links :
            yield scrapy.Request(url = response.urljoin(link), callback = self.parse_one)
        next_page = response.xpath('//a[@class="pn-next"]/@href').extract_first()
        if next_page is not None :
            yield scrapy.Request(url = response.urljoin(next_page), callback = self.parse_page)

    def parse_one(self, response) :
        """Parse one product detail page into a JDItem, request its comment
        summary, and enqueue any color/size sibling skus embedded in the page."""
        self.logger.info(response.url)
        # Was a hard-coded host/port, inconsistent with getRedis(); use the
        # shared helper so the connection settings live in one place.
        r = self.getRedis()
        skuid = re.findall(r"\d+", response.url)
        # findall always returns a list; the old None/type check let an empty
        # list through and crashed on skuid[0].
        if not skuid :
            self.logger.error(response.url)
            return
        skuid = int(skuid[0])
        if r.sadd(self.redis_key, skuid) == 0 :
            return  # already visited in this run
        if Phone.isExisted(skuid) and self.full == 0 :
            return  # stored previously and incremental mode requested
        skuname = response.xpath('//div[@class="sku-name"]/text()').extract_first()
        skuname = '' if skuname is None else skuname.strip()
        skushop = response.xpath('//div[@class="name"]/a/text()').extract_first()
        skushop = '' if skushop is None else skushop.strip()
        dds = self.deal(response.xpath('//div[@class="Ptable"]//dd/text()').extract())
        dts = response.xpath('//div[@class="Ptable"]//dt/text()').extract()
        # zip truncates to the shorter list, avoiding the IndexError the old
        # index loop hit when dts was shorter than the filtered dds.
        detail = json.dumps(dict(zip(dts, dds)), ensure_ascii=False)
        item = JDItem()
        item['skuid'] = skuid
        item['url'] = response.url
        item['skuname'] = skuname
        item['skushop'] = skushop
        item['detail'] = detail
        item['category'] = self.cat

        yield scrapy.Request(url=self.basecommenturl + str(item['skuid']),
                             callback=self.parse_comment, meta={'item': item})
        # colorSize is a JS array literal embedded in the (GBK-encoded) page
        # body listing the sibling color/size variants of this product.
        colorSize = re.findall(r"(?<=colorSize: )\[.*?\]",
                               response.body.decode('gbk', errors='ignore'))
        if not colorSize :
            return
        for one in json.loads(colorSize[0]) :
            sibling = one.get('skuId')
            if sibling is None :
                continue
            if not r.sismember(self.redis_key, sibling) :
                yield scrapy.Request(url="https://item.jd.com/" + str(sibling) + ".html",
                                     callback=self.parse_one)

    def parse_comment(self, response) :
        """Attach the comment summary (minus the bulky comment list) to the
        item from meta and emit it."""
        self.logger.info(response.url)
        item = response.meta['item']
        rst = json.loads(response.text)
        # Drop the raw per-comment payload; keep only the summary statistics.
        rst.pop('comments', None)
        item['comment'] = json.dumps(rst, ensure_ascii=False)
        yield item

    def deal(self, dds) :
        """Return *dds* with whitespace-only entries removed."""
        return [dd for dd in dds if dd.strip() != '']

    def getRedis(self) :
        """Return a StrictRedis client for the configured host/port (db 0)."""
        return redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0)
