import re
import requests
import pymysql
import configparser
from bs4 import BeautifulSoup


def save_data(movice_list):
    """Insert scraped movie rows into the movice_top250 table.

    Each element of movice_list is a [name, point, img_url, desc]
    sequence matching the parameterized INSERT below.
    """
    conn = get_db_connection()
    try:
        # Parameterized query: pymysql escapes every value server-side.
        sql = 'insert into movice_top250(`name`,point,img_url,`desc`) values(%s,%s,%s,%s)'
        # Context manager closes the cursor even if executemany raises.
        with conn.cursor() as cursor:
            cursor.executemany(sql, movice_list)
        conn.commit()
    finally:
        # Always release the connection, even on insert/commit failure
        # (the original leaked both cursor and connection on error).
        conn.close()


def get_db_connection():
    """Open a pymysql connection configured by the [mysql] section of mysql_db.ini.

    The ini file must define host, port, user, pwd and doubandb (the
    database name). Raises configparser errors (NoSectionError /
    NoOptionError) if the file or options are missing.
    """
    conf = configparser.ConfigParser()
    conf.read('mysql_db.ini')
    section = conf['mysql']
    return pymysql.connect(
        host=section['host'],
        port=conf.getint('mysql', 'port'),  # getint does the str->int conversion
        user=section['user'],
        passwd=section['pwd'],
        db=section['doubandb'],
        charset='utf8',  # NOTE(review): utf8mb4 would be safer if titles can contain emoji — confirm table charset first
    )


def get_movie_list(html):
    """Parse one Douban Top-250 result page into a list of movie rows.

    Args:
        html: raw HTML text of a top250 page.

    Returns:
        A list of [title, rating, image_url, description] lists, also
        printed one per line as a '/'-joined string.
    """
    # Bug fix: parse the `html` argument; the original read the global
    # `respose` and silently ignored its parameter.
    soup = BeautifulSoup(html, 'lxml')
    movie_list = []
    for item in soup.findAll('div', attrs={"class": "item"}):
        img = item.find('img')['src']
        title = item.find('span', attrs={"class": "title"}).get_text()
        star = item.find('span', attrs={"class": "rating_num"}).get_text()
        # Robustness: some entries have no one-line quote ("inq") span;
        # the original raised AttributeError on those movies.
        inq = item.find('span', attrs={"class": "inq"})
        desc = inq.get_text() if inq is not None else ''
        movie_list.append([title, str(star), img, desc])
    for movie in movie_list:
        print(movie[0] + '/' + movie[1] + '/' + movie[2] + '/' + movie[3])
    return movie_list


# Scrape every page (start=0, 25, ..., 225) of the Douban Top 250 list
# and persist each page's movies to the database.
for page_index in range(10):
    # Bug fix: the first page is start=0; the old `(i - 1) * 25` requested
    # start=-25 first and never fetched the final page (start=225).
    start = page_index * 25
    url = 'https://movie.douban.com/top250?start=' + str(start) + '&filter='
    # Keep the (misspelled) global name `respose`: get_movie_list may read it.
    respose = requests.get(url)
    movie_list = get_movie_list(respose.text)
    save_data(movie_list)

# print(respose.status_code)  # debug aid: HTTP status code of the last response
# print(respose.text)  # debug aid: raw HTML body of the last response
