import requests
from bs4 import BeautifulSoup
import pymysql

# Target page to scrape for movie names.
url = 'https://wandou.la/hot/movie'

# Fetch the page. A timeout is required: without one, requests can block
# forever on an unresponsive host (the original had no timeout).
response = requests.get(url, timeout=10)

# Only proceed when the request succeeded.
if response.status_code == 200:
    # Parse the HTML document.
    soup = BeautifulSoup(response.text, 'html.parser')

    # Each movie name lives in a <p class="name"> element.
    # NOTE(review): selector taken from the original code — confirm against the live page.
    names = [p.get_text() for p in soup.find_all('p', attrs={"class": "name"})]

    # Print every scraped movie name.
    for name in names:
        print(name)

    # Persist the scraped names into MySQL.
    conn = pymysql.connect(host='192.168.18.13', port=33046, user='root',
                           password='123456', db='xuweijie', charset='utf8')
    # try/finally guarantees the connection is released even if an INSERT
    # fails (the original leaked both cursor and connection on error).
    try:
        # PyMySQL cursors are context managers; `with` closes the cursor.
        with conn.cursor() as cursor:
            # Parameterized statement guards against SQL injection from page
            # content; executemany inserts all rows in one call instead of a
            # per-row execute loop.
            cursor.executemany(
                "INSERT INTO movies (name) VALUES (%s)",
                [(name,) for name in names],
            )
        # Commit once after all rows are staged.
        conn.commit()
    finally:
        conn.close()
else:
    print("请求失败，状态码：", response.status_code)