# -*- coding: utf-8 -*-
# author : li shi jie
# Email : yr785339493@qq.com

import requests
requests.urllib3.disable_warnings()
import pymysql
import json
import random
import re
import time
from itertools import product
class Commit_Spider:
    """Scrape Tmall item review (rate) pages and persist them to MySQL.

    Usage: ``Commit_Spider.parse(itemid, page, title)`` fetches one page of
    reviews for an item and inserts each review into the ``comments`` table
    of the local ``taobao`` database.
    """

    # JSONP review endpoint; {0} = item id, {1} = page number.  The other
    # query parameters (spuId, sellerId, ua token, _ksTS, callback, ...) were
    # captured from a live browser session — presumably the endpoint rejects
    # requests without them; verify before changing.
    base_url = 'https://rate.tmall.com/list_detail_rate.htm?itemId={0}&spuId=883273605&sellerId=3308044329&order=3&currentPage={1}&append=0&content=1&tagId=&posi=&picture=&groupId=&ua=098%23E1hv89vZvLQvU9CkvvvvvjiPR2sZ6jE2Pszv0jEUPmPvljibP2FWzjtEP2qZ1jECRLItvpvhvvvvvUhCvCB4cZKUIr147Dum%2FJwG4HAT7dcNh46CvvyvvvZmXeOvxbQjvpvj7DdNzYAaz2yCvvpvvvvvCQhvCli4zYMwcZKrvpvEvCm59FWsvC1E3QhvCvvhvvvEvpCW2jXFvvaw1WBlY2Kz8Z0vQbmxdX3z8SoxfwpOdeQEfwLvaXgXwymQD7zh58tYE57QD70Oe369D7zhz8TcwyNO%2BExreEyaUWFZlEkAdfyCvm9vvvvvphvvvvvv96CvpvQ5vvm2phCvhRvvvUnvphvppvvv96CvpCCvkphvC99vvOC0LTyCvv9vvUvsqQz%2B%2FvhCvvOvCvvvphvPvpvhMMGvv2yCvvpvvvvviQhvCvvv9UUjvpvj7DdNzYsYlFyCvvpvvvvvRphvCvvvvvmrvpvEvvFgmsQGvE2b&needFold=0&_ksTS=1540549141480_872&callback=jsonp873'

    # Session cookies / UA captured from a logged-in browser; the cookie is
    # likely expired by now — refresh it when requests start failing.
    headers = {
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'cookie': 'hng=CN%7Czh-CN%7CCNY%7C156; cna=8goBFBm4QAMCAX15JGtF6JcY; lid=evil5214; otherx=e%3D1%26p%3D*%26s%3D0%26c%3D0%26f%3D0%26g%3D0%26t%3D0; tk_trace=1; t=de00596301fff7361c428d2a1094c717; tracknick=evil5214; lgc=evil5214; _tb_token_=356b3f3de583b; cookie2=1e632e5f48227167b15ace0150f7d4b5; uc1=cookie16=Vq8l%2BKCLySLZMFWHxqs8fwqnEw%3D%3D&cookie21=WqG3DMC9FxUx&cookie15=URm48syIIVrSKA%3D%3D&existShop=false&pas=0&cookie14=UoTYNkL%2BQo1ttQ%3D%3D&tag=8&lng=zh_CN; uc3=vt3=F8dByRjKAHl2k6zIdmk%3D&id2=UoH8WASUrKk1qQ%3D%3D&nk2=BuUIbFdbzlY%3D&lg2=URm48syIIVrSKA%3D%3D; ck1=""; csg=9167c301; skt=ebc7ada28b4fa02c; x=__ll%3D-1%26_ato%3D0; _m_h5_tk=d7d42a17ab1b52c5dbb6384963f322f1_1540540919242; _m_h5_tk_enc=b666a1287dd2ecf0fb8f378c442fb2a6; whl=-1%260%260%260; isg=BGpq1Z_mxff6rElKnQ9Noz8Du9DMc--ZDnXNA_QiyrxKJwnh3G-7RycWszOe12bN',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36',
    }

    @staticmethod
    def remove_emoji(desstr, restr=''):
        """Return *desstr* with emoji replaced by *restr* (default: removed).

        Note: declared @staticmethod (the original relied on Python 3's
        class-attribute lookup to make ``self.remove_emoji(x)`` work by
        accident — the first positional argument was silently *desstr*).
        """
        try:
            # Wide (UCS-4) build: any astral-plane character in one class.
            co = re.compile(u'[\U00010000-\U0010ffff]')
        except re.error:
            # Narrow (UTF-16) build fallback: match surrogate pairs instead.
            co = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
        return co.sub(restr, desstr)

    @classmethod
    def parse(cls, itemid, page, title):
        """Fetch review page *page* of item *itemid* and save rows to MySQL.

        :param itemid: Tmall item id, interpolated into ``base_url``.
        :param page:   review page number to fetch.
        :param title:  item title, stored alongside every review row.
        """
        # Randomized throttle — presumably to dodge rate limiting; confirm
        # the 1-4s window is still sufficient before scaling up.
        time.sleep(random.randint(1, 4))
        response = requests.get(cls.base_url.format(itemid, page), headers=cls.headers, verify=False)
        # The endpoint returns JSONP ("jsonp873({...})"); strip the callback
        # wrapper to recover the raw JSON payload.
        jsondata = re.search(r'^[^(]*?\((.*)\)[^)]*$', response.text).group(1)
        data = json.loads(jsondata)
        data_list = data.get('rateDetail').get('rateList')

        if not data_list:
            print(str(itemid) + '为空')
            return

        # Connect only when there are rows to save, and always close the
        # connection (the original leaked one connection per call).
        # NOTE(review): credentials are hard-coded — move to config/env.
        conn = pymysql.connect(host='localhost', port=3306, user='root', passwd='123456', db='taobao', charset='utf8')
        try:
            cur = conn.cursor()
            print('连接成功!!!!')
            for item in data_list:
                append = item.get('appendComment')  # follow-up review, may be absent
                row = (
                    str(item.get('displayUserNick')),                        # reviewer nick
                    str(cls.remove_emoji(item.get('rateContent'))),          # review text, emoji stripped
                    str(item.get('rateDate')),                               # review date
                    str(item.get('auctionSku')),                             # purchased SKU / model
                    int(append.get('days')) if append else 0,                # days until follow-up
                    str(append.get('commentTime')) if append else '',        # follow-up timestamp
                    str(cls.remove_emoji(append.get('content'))) if append else '',  # follow-up text
                    str(itemid),
                    str(title),
                )
                # Parameterized query: pymysql escapes every value, fixing
                # both the SQL-injection hole and the broken INSERT that the
                # old %-formatted, hand-quoted statement produced whenever a
                # review contained a double quote.
                cur.execute(
                    "insert into comments(name,content,today_time,fordearmetype,"
                    "appenddays,appendtime,appendcontent,itemid,title) "
                    "values (%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                    row)
                conn.commit()
            print(str(itemid) + str(page) + "页数据已经保存")
        finally:
            conn.close()


if __name__ == '__main__':
    # Each line of item.csv is a JSON object carrying at least
    # 'item_id' and 'title' (despite the .csv extension).
    # `with` ensures the file is closed (the original leaked the handle).
    with open('item.csv', 'r', encoding='utf-8') as f:
        lines = f.readlines()

    # Renamed from `range` — the original shadowed the builtin.
    # Items before line 46 and pages before 50 are skipped — presumably
    # already crawled by an earlier run; confirm before rerunning from zero.
    pages = range(50, 100)
    for raw_line, page in product(lines[45:], pages):
        record = json.loads(raw_line.strip())
        item_id = record.get('item_id')
        title = record.get('title')

        Commit_Spider.parse(item_id, page, title)

