import requests
import re
import bs4
from bs4 import BeautifulSoup
import pymysql
import json
import configparser
import lxml

# Default HTTP request headers: a desktop-Chrome User-Agent plus a browser-like
# Accept header so JD.com serves the normal HTML pages instead of blocking us.
header = dict()
header['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
header['User-Agent'] = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'


def login_mysql():
    """Connect to MySQL using settings from config.ini and (re)create
    the result tables `commodity_data` and `ph_info`.

    Reads the [MYSQL] section (host, port, user, password, database)
    from config.ini in the working directory.

    Returns:
        tuple: (db, cursor) — an open pymysql connection and its cursor.
    """
    config = configparser.ConfigParser()
    config.read('config.ini')
    host = config.get('MYSQL', 'host')
    port = config.getint('MYSQL', 'port')  # getint avoids the manual int() cast
    user = config.get('MYSQL', 'user')
    password = config.get('MYSQL', 'password')
    database = config.get('MYSQL', 'database')
    db = pymysql.connect(host=host, port=port, user=user,
                         password=password, database=database)
    cursor = db.cursor()
    # Drop and recreate both tables so every run starts from a clean slate.
    statements = [
        'drop table if exists commodity_data',
        'drop table if exists ph_info',
        '''create table commodity_data(commodity_id varchar(30),commodity_name varchar(300),shop_name varchar(100),commodity_price varchar(50),commodity_url varchar(300))''',
        '''create table ph_info(commodity_id varchar(30),ph_name varchar(100),ph_weight varchar(100),ph_cpu varchar(100),ph_ram varchar(50),ph_rom varchar(50),ph_microSD varchar(100),ph_camera varchar(100),ph_camera_front varchar(100),ph_camera_back varchar(50),ph_screen varchar(50),ph_resolution varchar(100),ph_scree_type varchar(100),ph_share varchar(100),ph_battery varchar(100),ph_charger varchar(100),ph_system varchar(100),commodity_pic varchar(300))''',
    ]
    try:
        for sql in statements:
            cursor.execute(sql)
        db.commit()
    except Exception as e:
        # NOTE(review): MySQL DDL auto-commits, so this rollback is
        # best-effort only; we still report the error and return the
        # open connection, matching the original behavior.
        print(e)
        db.rollback()
    return db, cursor


def get_list():
    """Crawl the JD phone category list pages and collect product URLs.

    Walks pages 1..177 of the category listing, extracts every anchor
    inside the product <ul>, and returns the deduplicated product links
    with the 'https:' scheme prepended.

    Returns:
        list[str]: unique product page URLs, in discovery order.
    """
    invalid_link = 'javascript:;'  # placeholder href JD uses for non-links
    result = []
    seen = set()  # O(1) duplicate check instead of scanning the list
    url_template = 'https://list.jd.com/list.html?cat=9987,653,655&page={p}&delivery=0&stock=0&sort=sort_rank_asc&trans=1&JL=4_10_0#J_main'
    # One session for the whole crawl: reuses the TCP connection and headers.
    session = requests.session()
    session.headers = header
    for page in range(1, 178):  # number of pages to crawl
        url = url_template.format(p=page)
        try:
            res = session.get(url=url, timeout=20)
        except requests.RequestException as e:
            # A single timed-out page should not abort the whole crawl.
            print(e)
            continue
        res.encoding = res.apparent_encoding
        soup = BeautifulSoup(res.text, 'lxml')
        container = soup.find('ul', class_='gl-warp clearfix')
        if container is None:
            # Layout changed or an anti-bot page was served; skip this page.
            continue
        for anchor in container.find_all('a'):
            link = anchor.get('href')  # returns None when the attr is missing
            if not link or link == invalid_link:
                continue
            full_url = 'https:' + link
            if full_url not in seen:
                seen.add(full_url)
                print(full_url)
                result.append(full_url)
    return result


def deal_with_info(result):
    """Extract phone spec fields from the product parameter list items.

    Each element of *result* must expose `.text` and `.get('title')`
    (the bs4 Tag interface). For every item, the first spec keyword
    found in its text decides which field its 'title' attribute fills;
    later items matching the same keyword overwrite earlier ones.

    Returns:
        tuple[str, ...]: 16 spec strings in fixed order (name, weight,
        cpu, ram, rom, microSD, camera count, front/back camera, screen
        size, resolution, screen type, screen-to-body ratio, battery,
        charger, OS); '' for any field not found.
    """
    # Keywords in the same priority order as the original elif chain —
    # the first keyword contained in an item's text wins for that item.
    keywords = [
        '商品名称',      # ph_name
        '商品毛重',      # ph_weight
        'CPU型号',       # ph_cpu
        '运行内存',      # ph_ram
        '机身存储',      # ph_rom
        '存储卡',        # ph_microSD
        '摄像头数量',    # ph_camera
        '前摄主摄像素',  # ph_camera_front
        '后摄主摄像素',  # ph_camera_back
        '主屏幕尺寸',    # ph_screen
        '分辨率',        # ph_resolution
        '屏幕前摄组合',  # ph_scree_type
        '屏占比',        # ph_share
        '电池容量',      # ph_battery
        '充电器',        # ph_charger
        '操作系统',      # ph_system
    ]
    values = [''] * len(keywords)
    for item in result:
        text = item.text
        for idx, keyword in enumerate(keywords):
            if keyword in text:
                values[idx] = item.get('title')
                break  # first match wins, like the original elif chain
    return tuple(values)


def save_data(href_list):
    """Fetch each product page, parse its data, and insert it into MySQL.

    For every URL: derives the SKU id from the digits in the URL, queries
    JD's price API, scrapes name/shop/image and the spec list from the
    product page, and inserts one row into `commodity_data` and one into
    `ph_info`. Uses the module-level `db`/`cursor` opened by
    login_mysql() and the module-level `header`; closes both when done.

    Args:
        href_list (list[str]): product page URLs from get_list().
    """
    total = len(href_list)
    for count, url in enumerate(href_list, start=1):
        print('正在爬取：' + str(count) + '/' + str(total))
        try:
            commodity_id = re.sub(r'\D', "", url)  # SKU id = digits in the URL
            session = requests.session()
            session.headers = header
            price_url = 'https://p.3.cn/prices/mgets?skuIds={}'.format(commodity_id)
            priceres = session.get(url=price_url)
            # The price endpoint returns JSON with one trailing character
            # that json.loads rejects, hence the [0:-1] slice.
            jsons = json.loads(priceres.text[0:-1])
            commodity_price = jsons[0]['p']
            res = session.get(url=url, timeout=20)
            res.encoding = res.apparent_encoding
            soup = BeautifulSoup(res.text, 'lxml')
            main_img = soup.find('div', class_='jqzoom main-img').find_all('img')[0]
            commodity_name = main_img.get('alt')
            commodity_pic = 'https:' + main_img.get('data-origin')
            shop_name = soup.find('div', class_='J-hove-wrap EDropdown fr').find('div', class_='name').text
            spec_items = soup.find('ul', class_='parameter2 p-parameter-list').find_all('li')
            ph_name, ph_weight, ph_cpu, ph_ram, ph_rom, ph_microSD, ph_camera, ph_camera_front, ph_camera_back, ph_screen, ph_resolution, ph_scree_type, ph_share, ph_battery, ph_charger, ph_system = deal_with_info(spec_items)
            print(commodity_id, commodity_name, shop_name, commodity_price, url)
            # Parameterized queries instead of str.format: values containing
            # quotes (common in product names) no longer break the statement
            # or open an SQL-injection hole.
            sql1 = 'insert into commodity_data values(%s,%s,%s,%s,%s)'
            sql2 = 'insert into ph_info values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
            try:
                cursor.execute(sql1, (commodity_id, commodity_name, shop_name, commodity_price, url))
                cursor.execute(sql2, (commodity_id, ph_name, ph_weight, ph_cpu, ph_ram, ph_rom, ph_microSD, ph_camera, ph_camera_front, ph_camera_back, ph_screen, ph_resolution, ph_scree_type, ph_share, ph_battery, ph_charger, ph_system, commodity_pic))
                db.commit()
                print('成功')
            except Exception as e:
                print(e)
                db.rollback()
        except Exception as e:
            # Was a silent bare `except: pass`; still best-effort per item,
            # but now we at least report why an item was skipped.
            print(e)
    cursor.close()
    db.close()


if __name__ == '__main__':
    # Connect to MySQL FIRST: a bad config.ini now fails immediately
    # instead of after the full multi-page crawl has already run.
    db, cursor = login_mysql()
    href_list = get_list()
    save_data(href_list)