import requests
import pymysql
from bs4 import BeautifulSoup

# Crawl Douban Top250 pages and insert the scraped rows into the database
class Spider(object):
    """Scrape Douban Top250 movie pages and store (title, intro, score) rows in MySQL.

    Usage: ``Spider().fenye(n)`` crawls pages 1..n and inserts each page's
    rows into the ``cmdb_movie`` table.
    """

    # Target URL for the next fetch; rewritten by fenye() once per page.
    url = ''
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36'
    }
    # NOTE(review): connecting at class-definition (import) time is a side
    # effect; kept as-is for backward compatibility with any code that reads
    # Spider.conn / Spider.cursor directly. Consider lazy initialization.
    conn = pymysql.connect(
        host='localhost', user='root', password="admin",
        database='python', port=3306,
        charset='utf8'
    )
    cursor = conn.cursor()

    def insert(self, rows, sql="insert into cmdb_movie(title,intro,score) values(%s,%s,%s)"):
        """Bulk-insert (title, intro, score) tuples and commit.

        Renamed parameter (was ``list``) to stop shadowing the builtin;
        in-file callers pass it positionally, so behavior is unchanged.
        """
        self.cursor.executemany(sql, rows)
        self.conn.commit()

    def __fetch_content(self):
        """Download the page at Spider.url and return its HTML as UTF-8 text."""
        response = requests.get(Spider.url, headers=Spider.header)
        # Force UTF-8 decoding regardless of the server-declared charset.
        response.encoding = "utf-8"
        return response.text

    def __analysis(self, html):
        """Parse movie entries from the HTML; return a list of dicts.

        Each dict has keys 'title', 'intro', 'score' (insertion order matters:
        guolv() converts values to tuples matching the SQL column order).
        """
        soup = BeautifulSoup(html, 'lxml')
        items = soup.find(id='wrapper').find_all('li')
        products = []
        for item in items:
            score = item.find('span', class_='rating_num')
            intro = item.find('span', class_='inq')
            title = item.find('span', class_='title')
            # BUG FIX: some <li> elements under #wrapper lack one of these
            # spans (e.g. movies without a quote, or nav list items); the
            # original crashed with AttributeError on NoneType.get_text().
            if score is None or intro is None or title is None:
                continue
            products.append({
                'title': title.get_text(),
                'intro': intro.get_text(),
                'score': score.get_text(),
            })
        return products

    def show(self, products):
        """Print each scraped product dict, one per line (debug helper)."""
        for product in products:
            print(product)

    def guolv(self, products):
        """Convert the dicts into (title, intro, score) tuples for executemany."""
        return [tuple(p.values()) for p in products]

    def go(self):
        """Fetch, parse, and persist one page at the current Spider.url."""
        html = self.__fetch_content()
        products = self.__analysis(html)
        rows = self.guolv(products)
        self.insert(rows)
        print(rows)

    def fenye(self, page):
        """Crawl pages 1..page of the Top250 list.

        BUG FIX: the original set the same URL on every iteration, so every
        "page" re-fetched page 1. Douban paginates with start=0, 25, 50, ...
        """
        for i in range(1, page + 1):
            Spider.url = "https://movie.douban.com/top250?start={}".format(25 * (i - 1))
            print(self.url)
            self.go()

if __name__=="__main__":
    spider=Spider()
    spider.fenye(1)