import requests
from bs4 import BeautifulSoup
import pymysql

'''Crawl image pages that do NOT carry netdisk (cloud-drive) download data.'''
class PicSpider(object):
    """Spider that scrapes image URLs from article pages (e.g. moe.005.tv)
    and stores (title, image URL) rows in a MySQL table via pymysql.

    Page URLs to crawl are read from a "type" table; results are written
    with executemany into the destination table given at construction.
    """

    # Browser-like User-Agent sent with every HTTP request.
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36',
    }

    def __init__(self, formName="win10_pic", sqlSelect="win10_pic_type"):
        """
        :param formName: destination table receiving (pic_name, pic_href) rows
        :param sqlSelect: source table whose third column holds page URLs
        """
        self.url = ""  # current page URL, e.g. http://moe.005.tv/75059.html
        # NOTE: table names cannot be bound as SQL parameters, so they are
        # interpolated into the statement; formName/sqlSelect must come from
        # trusted code, never from user input.
        self.sql = "insert into " + formName + "(pic_name,pic_href) values(%s,%s)"
        self.sqlSelect = sqlSelect
        # Connect per instance instead of at class-definition (import) time,
        # so importing this module no longer requires a live database.
        self.conn = pymysql.connect(
            host='localhost', user='root', password="admin",
            database='python', port=3306,
            charset='utf8'
        )
        self.cursor = self.conn.cursor()

    def start(self):
        """Crawl every URL stored in the type table and persist the results."""
        for page_url in self.__selectType():
            self.url = page_url
            self.__analysis(self.__fetch_content())

    def __selectType(self):
        """Return all page URLs (third column) from the type table."""
        self.cursor.execute("select * from " + self.sqlSelect)
        # fetchall() replaces the old hard-coded fetchone() loop of exactly
        # 200 iterations, which raised TypeError (rs is None) when the table
        # held fewer rows and silently truncated when it held more.
        return [row[2] for row in self.cursor.fetchall()]

    def __fetch_content(self):
        """Download self.url and return its HTML decoded as UTF-8."""
        # Send the class User-Agent (previously defined but never used) and
        # bound the request so a stalled server cannot hang the crawl.
        response = requests.get(self.url, headers=self.header, timeout=10)
        response.encoding = "utf-8"
        return response.text

    def __analysis(self, html):
        """Parse one article page and insert (title, img src) rows.

        Expects the page to contain at least two div.content_w_box elements;
        the second one holds the article title (h1) and the image gallery
        (div.content_nr) — matches the original scraping logic.
        """
        soup = BeautifulSoup(html, 'lxml')
        box = soup.findAll('div', class_='content_w_box')[1]
        title = box.find('h1').get_text()
        imgs = box.find('div', class_='content_nr').findAll('img')
        rows = [(title, img['src']) for img in imgs]
        print(rows)
        try:
            self.cursor.executemany(self.sql, rows)
            self.conn.commit()
        except pymysql.Error:
            # Roll back the failed batch. Catching only DB errors (instead
            # of BaseException) no longer swallows KeyboardInterrupt/SystemExit.
            self.conn.rollback()
            print("数据库插入失败")

if __name__=="__main__":
    pic=PicSpider("win10_menu","win10_menu_type")
    pic.start()






