import requests
import pymysql
from bs4 import BeautifulSoup

# Crawl several list pages and insert the scraped data into the database
class Spider(object):
    """Scrape the v.qq.com cartoon list pages and persist records to MySQL."""

    # Current target URL; rewritten by fenye() before each page crawl.
    url = ''
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36'
    }
    # NOTE(review): the connection is opened at class-definition (import) time
    # and never closed — consider moving it into __init__ or a context manager.
    conn = pymysql.connect(
        host='localhost', user='root', password="admin",
        database='python', port=3306,
        charset='utf8'
    )
    cursor = conn.cursor()

    def insert(self, rows, sql="insert into cmdb_pc(id,title,href,sign) values(NULL,%s,%s,%s)"):
        """Bulk-insert (title, href, sign) tuples and commit.

        FIX: the parameter was named ``list``, shadowing the builtin; renamed
        to ``rows`` (the in-file caller passes it positionally, so it is
        unaffected). Uses parameterized SQL, so values are escaped by the
        driver.
        """
        self.cursor.executemany(sql, rows)
        self.conn.commit()

    def __fetch_content(self):
        """GET Spider.url and return the response body decoded as UTF-8."""
        resObj = requests.get(Spider.url, headers=Spider.header)
        # Force UTF-8 decoding regardless of the declared charset.
        resObj.encoding = "utf-8"
        return resObj.text

    def __analysis(self, html):
        """Parse one list page into [{'title':..., 'href':..., 'sign':...}, ...].

        Locates the container tag carrying the custom attribute
        _wind="columnname=video", then one record per <li class="list_item">.
        """
        soup = BeautifulSoup(html, 'lxml')
        items = soup.find(_wind='columnname=video').find_all('li', class_='list_item')
        products = []
        for product in items:
            sign = product.find('div', class_='figure_desc').get_text()
            data = product.find('div', class_='figure_title_score').find('a')
            products.append({
                'title': data.get('title'),
                'href': data.get('href'),
                'sign': sign,
            })
        return products

    def show(self, products):
        """Print each scraped record (debug helper)."""
        for product in products:
            print(product)

    def sp(self, products):
        """Convert record dicts to tuples for executemany.

        Relies on dict insertion order, so values come out as
        (title, href, sign) — matching the SQL placeholder order.
        """
        return [tuple(p.values()) for p in products]

    def go(self):
        """Fetch, parse and persist the page currently set in Spider.url."""
        html = self.__fetch_content()
        products = self.__analysis(html)
        res = self.sp(products)
        self.insert(res)
        print(res)

    def fenye(self, page):
        """Crawl pages 1..page inclusive.

        BUG FIX: the URL previously hard-coded offset=0, so every iteration
        re-fetched the same first page. The offset now advances by 30 items
        per page (30 appears to be this list page's size — TODO confirm
        against the live site).
        """
        for i in range(1, page + 1):
            Spider.url = "http://v.qq.com/x/list/cartoon?offset={}&sort=4".format((i - 1) * 30)
            print(self.url)
            self.go()

# Script entry point: crawl a single page when run directly.
if __name__ == "__main__":
    Spider().fenye(1)