# encoding = utf-8
import concurrent
import datetime
import os
import time
from concurrent.futures import ThreadPoolExecutor
import ssl
import pymysql
import json
import requests
from bs4 import BeautifulSoup

# Base URL that server-relative picture paths are appended to when downloading.
url_pic = "https://fitment.chenyoujiu.com"
# Shared MySQL connection and cursor used by insert()/insert_picture().
# NOTE(review): credentials are hard-coded and the connection opens at import
# time — consider moving both into a config/entry point.
db = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='zhou12345', db='test', charset='utf8')
cursor = db.cursor()
# Local root directory that downloaded pictures are saved under.
picture_dir = "E:/Picture"


def header():
    """Build the fixed HTTP request headers used for every call to the host.

    Returns:
        dict: header name -> value, mimicking a WeChat mini-program client.
    """
    return {
        'Host': 'fitment.chenyoujiu.com',
        'Pragma': 'no-cache',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36 MicroMessenger/7.0.9.501 NetType/WIFI MiniProgramEnv/Windows WindowsWechat',
    }


# 获取分类信息
# 获取分类信息
def get_page_urls(pid):
    """Fetch the full category list and parse the subtree rooted at *pid*.

    Any requests-level failure is swallowed; the function always returns None.
    """
    try:
        # NOTE(review): verify=False disables TLS certificate validation.
        resp = requests.post(
            'https://fitment.chenyoujiu.com/productApi/getProductCatList',
            data={'token': '6b0f0c96711fdd4666303468750196bd'},
            headers=header(),
            verify=False)
        if resp.status_code != 200:
            return None
        print('解析开始')
        # 解析分类信息内容
        analysis_json(resp.content, pid)
        return None
    except requests.RequestException:
        return None


# 解析分类信息内容
def analysis_json(json_data, pid):
    my_data = json.loads(json_data)
    product_cats = my_data.get('result').get('productCats')
    # 开始解析
    for item in product_cats:
        # 保存主分类信息
        # insert(item, 0)
        # 保存主分类图片
        # save_picture_info(item['img_url'])
        # print(item['id'])
        if pid == int(item['id']):
            print("标题名称：" + item['title'])
            for item1 in item['children']:
                print("分类名称："+item1['title'])
                # 保存子分类信息
                # insert(item1, int(item['id']))
                # 获取该分类下的所有图片
                get_page_data(item1['id'])


# 保存分类信息
# 保存分类信息
def insert(item, pid):
    """Insert one category row into the ``productcat`` table and commit.

    Args:
        item: category dict with keys 'id', 'title', 'zindex', 'img_url'.
        pid: parent category id (0 for top-level categories).
    """
    # BUG FIX / SECURITY: the original built the statement with %-string
    # interpolation, which broke on quotes in titles and was SQL-injectable.
    # pymysql escapes values itself when they are passed as the second
    # argument to execute(); %s is the placeholder for every column type.
    sql = "INSERT INTO productcat VALUES(%s, %s, %s, %s, %s)"
    data = (int(item['id']), item['title'], item['zindex'], item['img_url'], pid)
    cursor.execute(sql, data)
    db.commit()


# 获取该分类下的所有图片
def get_page_data(id_pic):
    data = {'token': '6b0f0c96711fdd4666303468750196bd', 'page_size': 15, 'id': id_pic, 'cur_page': 0}
    try:
        response = requests.post('https://fitment.chenyoujiu.com/productApi/getProductCatInfo',
                                 data=data, headers=header(), verify=False)
        if response.status_code == 200:
            page_data = json.loads(response.content)
            count = 1
            while count <= page_data.get('result')['paginate']['total_page']:
                get_picture(id_pic, count)
                count = count + 1
            return None
    except requests.RequestException:
        print("分类信息总页数获取错误：" + "分类ID=" + id_pic)
        return None


def get_picture(id_pic, page_index):
    """Fetch one result page of category *id_pic* and persist each product.

    For every product on the page: insert its rows via insert_picture(),
    then download the three image variants of its first attr_pic entry.
    Always returns None.
    """
    data = {'token': '6b0f0c96711fdd4666303468750196bd', 'page_size': 15, 'id': id_pic, 'cur_page': page_index}
    try:
        # NOTE(review): verify=False disables TLS certificate validation.
        response = requests.post('https://fitment.chenyoujiu.com/productApi/getProductCatInfo',
                                 data=data, headers=header(), verify=False)
        if response.status_code == 200:
            page_data = json.loads(response.content)
            products = page_data.get('result')['product']
            for item in products:
                # 保存图片产品信息
                insert_picture(item)
                # BUG FIX (readability): the original rebound the loop's own
                # source name `picture_data` inside the loop body; use a
                # distinct name for the per-item attr record instead.
                attr_pic = item['attr']['attr_pic'][0]
                save_picture_info(attr_pic['finished_pic'])
                save_picture_info(attr_pic['thumbnail_url'])
                save_picture_info(attr_pic['url'])
            return None
    except requests.RequestException:
        # BUG FIX: id_pic/page_index may be ints; "str" + int raised
        # TypeError here and masked the real request failure.
        print("分类信息数获取错误：" + "分类ID=" + str(id_pic) + ";当前页数=" + str(page_index))
        return None


# 保存图片信息
def save_picture_info(item):
    if len(item) == 0 or item.isspace() == True:
        print("请求的图片路径为空")
        return None
    keep = True
    maxtimes = 3
    count = 0
    p_items = item.split('/')
    pic_dir_name = picture_dir + item
    pic_dir = picture_dir + item.replace('/' + p_items[len(p_items) - 1], '')
    if not os.path.exists(pic_dir):  # 如果不存在路径，则创建这个路径，关键函数就在这两行，其他可以改变
        os.makedirs(pic_dir)
    while keep and count < maxtimes:
        try:
            with requests.Session() as s:
                print(url_pic + item)
                response = s.get(url_pic + item, headers=header(), timeout=(20, 40))
                keep = False
                if response.status_code == 200:
                    page_data = response.content
                    # print("开始保存")
                    with open(pic_dir_name, 'wb') as f:
                        f.write(page_data)

                        print("成功")
                return None
        except requests.exceptions.ReadTimeout:
            time.sleep(10)
            count = count + 1
            print("读取超时异常，第" + str(count) + "次尝试。" + url_pic + item)
        except requests.exceptions.ConnectTimeout:
            print("ConnectTimeout，获取图片资源出错(链接超时)：" + url_pic + item)
        except requests.exceptions.Timeout:
            print("Timeout，获取图片资源出错（超时）：" + url_pic + item)
        except requests.RequestException:
            time.sleep(10)
            count = count + 1
            print("RequestException，第" + str(count) + "次尝试。" + url_pic + item)
    # 走到这里就是重试三次都失败了
    print("获取图片资源出错（请求超时）：" + url_pic + item)


def get_url4():
    """Probe https://www.google.com.hk, retrying up to 3 times on any error.

    Prints a timestamp before the first attempt and after each failure,
    sleeping 10 s between retries. Used as a connectivity check.
    """
    probe_headers = {
        'content-type': 'application/json',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.108 Safari/537.36'
    }

    target = "https://www.google.com.hk"

    max_attempts = 3
    attempt = 0
    succeeded = False
    print(datetime.datetime.now())
    while not succeeded and attempt < max_attempts:
        try:
            requests.get(url=target, headers=probe_headers, timeout=5)
            succeeded = True
        except Exception:
            print(datetime.datetime.now())
            # 延时10s后重试
            time.sleep(10)
            attempt += 1
            print('重试' + str(attempt))


# 保存图片产品信息
def insert_picture(item):
    sql = "INSERT INTO productcatinfo VALUES(%d,'%s',%d,%d)"
    data = (int(item['pid']), item['product_title'], int(item['category_id']), int(item['sort']))
    cursor.execute(sql % data)
    db.commit()

    dql_pic = "INSERT INTO productattr VALUES(%d,'%s','%s',%d,%d,%f,'%s','%s','%s',%d)"
    attr_pic = item['attr']['attr_pic'][0]
    data_pic = (
        int(attr_pic['id']), attr_pic['pic_title'], attr_pic['url'], int(attr_pic['width']), int(attr_pic['height']),
        float(attr_pic['price']), attr_pic['spec'], attr_pic['finished_pic'], attr_pic['thumbnail_url'],
        int(item['pid']))
    cursor.execute(dql_pic % data_pic)
    db.commit()

    # print("保存数据到数据库成功")


if __name__ == '__main__':
    print("Hello, Python!")
    # Crawl all child categories (and their pictures) under category 986.
    get_page_urls(986)
    # NOTE(review): the triple-quoted string below is dead code kept as a
    # scratchpad — one-off recovery runs re-downloading specific failed
    # picture paths and re-crawling a single category. It is never executed.
    """
    # 获取分类信息
    # get_page_urls()
    error_list = ['/uploads/images/2020/12/04/5fa95259af90947541109417fcd6c4c8.jpg',
                  '/uploads/images/2020/12/04/c5d93d9bb4598aa4e88d35c7d80f4cd8.jpg',
                  '/uploads/pic/2020/11/11/d83a2a141f05ed128656cf57a3b7cedf.png',
                  '/uploads/pic/2020/12/04/a521c26112741387d59dfd8df87b4769_thumb.png',
                  '/uploads/pic/2020/12/04/3be6db444c6c99b028f7ad8bd9f2c83b.png',
                  '/uploads/images/2020/12/04/61a5116509c8bff050a6c9637317a76c.jpg',
                  '/uploads/pic/2020/12/04/57a3560b691e8558f68c4e7d52761e05_thumb.png',
                  '/uploads/images/2020/12/04/08bd9a8ad3dabab8df10ce0405efdec7.jpg',
                  '/uploads/pic/2020/12/04/b9f9e9a12f8afa03ce9d665687006fe7_thumb.png',
                  '/uploads/pic/2020/12/04/2d2f1c233727c0e3b3c4527e5dc3c814.png',
                  '/uploads/images/2020/12/04/48b23191d3d848ae63af2d4e6f9fe7be.jpg',
                  '/uploads/pic/2020/12/04/cf70458714c0ab3a8a0f9fcdbd5b6a61_thumb.png',
                  '/uploads/images/2020/12/04/c97207c07e44150f5843856696617489.jpg',
                  '/uploads/pic/2020/12/04/0453f801d10fa92ab02e368cb0095ebe.png',
                  '/uploads/images/2020/12/05/c9a70e159dd54bdf6eac25231cd5adad.jpg',
                  '/uploads/images/2020/12/05/6a12f55577d7b894334ab2ded58b3aae.jpg', ]
    #   for i in error_list:
        #  save_picture_info(i)

    get_page_data(16197)
    """
