#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Created on 2018-08-21 19:11:13
# Project: jd_seckill

from pyspider.libs.base_handler import *
from pyspider.database.mysql.toMySql import ToMySql
import re


class Handler(BaseHandler):
    """Crawl JD.com listing pages (cat=670,677,679) and persist product
    records — URL/name, price, review counters and vendor info — to MySQL
    via on_result().
    """

    def __init__(self):
        # Category listing URL: pick a category in the JD sidebar and
        # paste its link here.
        self.url = 'https://list.jd.com/list.html?cat=670,677,679'
        self.startpage = 1
        self.endpage = 0

    # Bump 'itag' and press "run" to force a full re-crawl.
    crawl_config = {
        'itag': 'v223'
    }

    def on_start(self):
        """Entry point: fetch the first listing page."""
        self.crawl(self.url, callback=self.index_page, validate_cert=False)

    @config(age=5)
    def index_page(self, response):
        """Read the total page count and enqueue every listing page."""
        # Total number of pages, taken from the pager widget.
        self.endpage = int(response.doc('span[class^="p-skip"]').find('b').text())
        for page in range(1, self.endpage + 1):
            self.crawl(self.url + '&page=' + str(page),
                       callback=self.detail_page,
                       validate_cert=False, fetch_type='js')

    @config(priority=2)
    def detail_page(self, response):
        """Enqueue each product detail page found on one listing page."""
        for each in response.doc('div[class^="gl-i-wrap"]').items():
            self.crawl(each.children('.p-name').children().attr.href,
                       callback=self.deal_page, validate_cert=False,
                       save={'shopid': str(each.attr('data-sku')),
                             'venderid': each.attr('venderid')})

    @config(priority=2)
    def deal_page(self, response):
        """Insert the base product record and enqueue the price, review
        and stock/vendor API endpoints for this SKU."""
        shopid = response.save['shopid']
        # Price API (re-fetched hourly).
        self.crawl('https://p.3.cn/prices/mgets?skuIds=J_' + shopid,
                   callback=self.deal_page1, validate_cert=False,
                   save={'shopid': shopid}, age=60 * 60, auto_recrawl=True)
        # Review-summary API.
        self.crawl('https://club.jd.com/comment/productCommentSummaries.action?referenceIds=' + shopid,
                   callback=self.deal_page2, validate_cert=False)
        # Stock / vendor API (re-fetched hourly).
        self.crawl('https://c0.3.cn/stock?skuId=' + shopid
                   + '&area=19_1607_3639_0&venderId=' + response.save['venderid']
                   + '&cat=670,677,679&extraParam={%22originid%22:%221%22}',
                   callback=self.deal_page3,
                   validate_cert=False, age=60 * 60, auto_recrawl=True)
        return {
            'Type': 'insert',
            'table': {
                'ID': shopid,
                'Url': response.url,
                'Name': response.doc('div[class^="sku-name"]').text(),
            }
        }

    # NOTE: deal_page1/2/3 could be merged into one handler; kept
    # separate because each JD API has a different response shape.
    @config(priority=2)
    def deal_page1(self, response):
        """Parse the price API response and update the record's price."""
        if re.search('error', response.text):
            # JD occasionally returns an error payload; re-enqueue.
            self.crawl(response.url, callback=self.deal_page1, validate_cert=False)
            return
        item = response.json[0]
        return {
            # item['id'] looks like 'J_12345'; keep only the digits.
            'ID': re.search(r'\d+', item['id']).group(),
            'Type': 'update',
            'table': {
                'Price': item['p'],
            }
        }

    @config(priority=2)
    def deal_page2(self, response):
        """Parse the review-summary API and update review counters."""
        try:
            summary = response.json['CommentsCount'][0]
        except Exception as e:
            print('Error: %s' % e)
            return
        return {
            'ID': summary['SkuId'],
            'Type': 'update',
            'table': {
                # 'SecKill': response.doc('div[class^="activity-type"]').text(),
                'AllPingJia': summary['CommentCountStr'],   # total reviews
                'ShaiTu': summary['VideoCountStr'],         # reviews with media
                'ZhuiPin': summary['AfterCountStr'],        # follow-up reviews
                'HaoPing': summary['GoodCountStr'],         # positive
                'ZhongPing': summary['GeneralCountStr'],    # neutral
                'ChaPing': summary['PoorCountStr'],         # negative
            }
        }

    @config(priority=2)
    def deal_page3(self, response):
        """Parse the stock API and record the shop name, distinguishing
        JD self-operated SKUs ('self_D' key) from third-party ('D')."""
        stock = response.json['stock']
        try:
            return {
                'ID': stock['realSkuId'],
                'Type': 'update',
                'table': {
                    'ShopName': stock['self_D']['vender'],
                    'self': '自营'
                }
            }
        except KeyError:
            # No 'self_D' key: third-party vendor lives under 'D'.
            return {
                'ID': stock['realSkuId'],
                'Type': 'update',
                'table': {
                    'ShopName': stock['D']['vender'],
                    'self': '非自营'
                }
            }

    def on_result(self, result):
        """Persist one scraped result into the 'showcard' MySQL table.

        result['Type'] selects insert vs. update; result['table'] holds
        the column values.
        """
        if not result:
            return
        kwargs = {'host': 'localhost',
                  'user': 'root',
                  'password': 'xxcaonima',
                  'db': 'jd',
                  # NOTE(review): 'char' looks like it should be
                  # 'charset' — confirm against ToMySql's expected keys.
                  'char': 'utf8'
                  }
        sql = ToMySql(kwargs)
        if result['Type'] == 'insert':
            sql.insert('showcard', **result)
        elif result['Type'] == 'update':
            sql.update('showcard', **result)

