import requests
from bs4 import BeautifulSoup
import mysql.connector
import time
import random

# MySQL connection settings for the local `movies` database.
# NOTE(review): credentials are hard-coded; consider loading them from the
# environment for anything beyond a local experiment.
db_config = dict(
    user='root',
    password='123456',
    host='localhost',
    database='movies',
    charset='utf8mb4',
)

# Browser-like request headers so the site serves the normal desktop HTML.
headers = {
    'User-Agent': (
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
        '(KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
    ),
}

# Connect to the MySQL database; abort the whole script if that fails,
# since nothing below can work without a live connection.
try:
    cnx = mysql.connector.connect(**db_config)
    cursor = cnx.cursor()
    print("成功连接到MySQL数据库")
except mysql.connector.Error as err:
    print(f"连接数据库失败: {err}")
    # raise SystemExit instead of exit(): exit() is a site-module helper
    # meant for the interactive shell and is absent under `python -S`.
    raise SystemExit(1)

# Parameterized INSERT statement. The IGNORE clause suppresses
# duplicate-key errors (MySQL INSERT IGNORE semantics), so re-running the
# scraper does not abort on rows that already exist.
add_movie = (
    "INSERT IGNORE INTO movie_info (title, info, rating) "
    "VALUES (%s, %s, %s)"
)

# Scrape the target site (Douban Top 250): 10 pages of 25 movies each,
# paged via the `start` query parameter.
base_url = 'https://movie.douban.com/top250'
start = 0
page = 1
while start < 250:  # Douban Top250 spans exactly 10 pages
    url = f"{base_url}?start={start}&filter="
    try:
        # timeout is essential here: without it a stalled connection would
        # hang the scraper forever. A Timeout is a RequestException, so the
        # existing handler below already covers it.
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()  # raise on HTTP 4xx/5xx
        soup = BeautifulSoup(response.text, 'html.parser')

        # Each movie entry lives in a <div class="item"> container.
        movies = soup.find_all('div', class_='item')
        if not movies:
            print(f"第{page}页没有找到电影信息，可能已经到达最后一页。")
            break

        for item in movies:
            # NOTE(review): these .find() chains assume the expected markup
            # is always present; a site layout change would raise
            # AttributeError, which is not caught here — confirm acceptable.
            title = item.find('span', class_='title').text
            info = item.find('div', class_='bd').p.get_text().strip()
            rating = item.find('span', class_='rating_num').text
            # INSERT IGNORE skips duplicates silently, so rowcount == 0
            # means the row already existed.
            cursor.execute(add_movie, (title, info, rating))
            if cursor.rowcount == 0:
                print(f"数据已存在，跳过插入: {title}")

        # Commit once per page rather than per row.
        cnx.commit()
        print(f"第{page}页数据已保存，共{len(movies)}条记录。")

        # Advance to the next page of 25 results.
        start += 25
        page += 1
        # Random 1-3 second delay between pages to avoid an IP ban.
        time.sleep(random.uniform(1, 3))
    except requests.exceptions.RequestException as e:
        print(f"请求错误: {e}")
        break
    except mysql.connector.Error as err:
        print(f"数据库错误: {err}")
        break

# Release the database resources now that scraping is finished.
for db_handle in (cursor, cnx):
    db_handle.close()
print("数据爬取完成")