from selenium.webdriver import ChromeOptions
import pandas as pd
import pymysql
from bs4 import BeautifulSoup as bs
from selenium import webdriver

# Configure a headless Chrome browser for scraping.
option = ChromeOptions()
option.add_argument('--headless')
# BUG FIX: the options object was built but never handed to the driver,
# so '--headless' had no effect; pass it via the `options` keyword.
chrome = webdriver.Chrome('d:\\selenium_webdriver\\chromedriver', options=option)

# MySQL connection used by p1/p2 to persist the scraped hero data.
connect = pymysql.connect(
    host='localhost',
    port=3306,
    user='root',
    password='cp9595',
    database='wzry')
cur = connect.cursor()
# Hero-list landing page; detail-page hrefs scraped from it are relative.
url = 'https://pvp.qq.com/web201605/herolist.shtml'


def p1(url, chrome=chrome):
    """Scrape the hero-list page and fan out to every hero detail page.

    Creates the four result tables (if absent), stores each hero's
    (name, img, href) in table d1, then calls p2 for each hero href.

    Parameters:
        url: address of the hero-list page.
        chrome: an already-started selenium Chrome driver.
    """
    global connect
    global cur
    chrome.get(url)
    page = bs(chrome.page_source, 'html.parser')
    # d1: hero index; d2: skills; d3: recommended equipment; d4: skins.
    # Fixed column-name typo from the original DDL (skil_cost -> skill_cost);
    # the INSERTs use positional VALUES, so the rename is safe.
    ddl = [
        'create table if not exists d1(name varchar(20),img varchar(200),href varchar(200))',
        'create table if not exists d2(num int,name varchar(20),skill_img varchar(200),'
        'skill_name varchar(20),skill_cold varchar(80),skill_cost varchar(50),'
        'skill_desc varchar(500))',
        'create table if not exists d3(name varchar(20),mw_img varchar(200),mw_desc varchar(50),mw_tips varchar(200))',
        'create table if not exists d4(name varchar(20),skin_name varchar(20),skin_src varchar(200))',
    ]
    for stmt in ddl:
        cur.execute(stmt)
    connect.commit()
    # Collect (name, img, href) for every hero on the list page.
    d1 = []
    for anchor in page.select('ul.herolist > li > a'):
        img_tag = anchor.select('img')[0]
        d1.append([img_tag.get('alt'), img_tag.get('src'), anchor.get('href')])
    # Batch-insert with one commit instead of a round trip per row.
    cur.executemany('insert into d1 values(%s,%s,%s)', d1)
    connect.commit()
    data = pd.DataFrame(d1, columns=['name', 'img', 'href'])
    for href in data.href:
        print(href)
        p2(href, chrome)


def p2(url, chrome):
    """Scrape one hero's detail page into tables d2 (skills), d3
    (recommended equipment) and d4 (skins).

    Parameters:
        url: href of the hero page, relative to https://pvp.qq.com/web201605/.
        chrome: an already-started selenium Chrome driver.
    """
    global cur
    global connect
    chrome.get('https://pvp.qq.com/web201605/' + url)
    page = bs(chrome.page_source, 'html.parser')
    hero = page.select('div.crumb > label')[0].text

    # --- skills -> d2 ---
    skill_img = [img.get('src') for img in page.select('ul.skill-u1 > li > img')]
    skill_name = [b.text for b in page.select(
        'div.skill-show > div.show-list > p.skill-name > b')]
    # Hoist the shared selector result instead of querying the page twice.
    name_paras = page.select('div.skill-show > div.show-list > p.skill-name')
    skill_cold = [p.select('span')[0].text for p in name_paras]
    skill_cost = [p.select('span')[1].text for p in name_paras]
    skill_desc = [d.text for d in page.select('div.show-list > div.skill-tips')]
    # zip truncates to the shortest list, avoiding the IndexError the original
    # range(len(skill_name)) indexing could raise on a malformed page; `num`
    # is passed as int to match the declared `num int` column.
    skill_rows = [
        [num, hero, img, name, cold, cost, desc]
        for num, (img, name, cold, cost, desc) in enumerate(
            zip(skill_img, skill_name, skill_cold, skill_cost, skill_desc))
    ]
    cur.executemany('insert into d2 values(%s,%s,%s,%s,%s,%s,%s)', skill_rows)
    connect.commit()

    # --- recommended equipment -> d3 ---
    mw_desc = [li.text for li in page.select('ul.sugg-u1 > li')]
    mw_img = [img.get('src') for img in page.select('ul.sugg-u1 > li > img')]
    mw_tips = page.select('div.sugg-info > p.sugg-tips')[0].text
    cur.executemany('insert into d3 values(%s,%s,%s,%s)',
                    [[hero, img, desc, mw_tips]
                     for img, desc in zip(mw_img, mw_desc)])
    connect.commit()

    # --- skins -> d4 ---
    skin_rows = []
    for li in page.select('ul.pic-pf-list > li'):
        skin_img = li.select('i > img')[0]
        skin_rows.append([hero, skin_img.get('data-title'),
                          skin_img.get('data-imgname')])
    cur.executemany('insert into d4 values(%s,%s,%s)', skin_rows)
    connect.commit()


if __name__ == '__main__':
    # Run the scrape only when executed as a script (not on import), and
    # release the DB connection and browser even if scraping fails part-way.
    try:
        p1(url, chrome)
    finally:
        cur.close()
        connect.close()
        chrome.quit()
