"""
@Author : 合肥一元数智教育科技有限公司
@Date :  2025/7/8 9:40
@Description : 
猫眼电影
"""
import random
import re
import time
import pymysql

import requests

"""
str_reg = '<div class="board-item-content">.*?<a .*?>(.*?)</a></p>.*?<p class="star">(.*?)</p>.*?<p class="releasetime">(.*?)</p>'
# [(title,star,time),()]
pattern = re.compile(str_reg, re.S)

"""

"""
 多页面url地址规则
'https://www.maoyan.com/board/4?offset=0'    第一页
'https://www.maoyan.com/board/4?offset=10'   第二页
'https://www.maoyan.com/board/4?offset=20'   第三页

offset =(page-1)*10
'https://www.maoyan.com/board/4?offset={offset}'

"""

"""

url = 'https://www.maoyan.com/board/4?offset={}'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36',
    'Cookie': '__mta=217497513.1751938850056.1751940354501.1751940360375.11; uuid_n_v=v1; uuid=90D80F705B9C11F08B10FB8CD97D095C4D54880AA20844C991EE4A86D388CA7A; _csrf=4100a2e958bd6e7ec4fdd9ade1b49e234b7152067c6edaf2dfd724149e30b654; Hm_lvt_e0bacf12e04a7bd88ddbd9c74ef2b533=1751938848; HMACCOUNT=472BD9641B52F7A9; _ga=GA1.1.1973117851.1751938848; _lx_utm=utm_source%3DBaidu%26utm_medium%3Dorganic; _lxsdk_cuid=197e7b15b02c8-04da3006ea34918-26011e51-137ca0-197e7b15b03c8; _lxsdk=90D80F705B9C11F08B10FB8CD97D095C4D54880AA20844C991EE4A86D388CA7A; __mta=217497513.1751938850056.1751938850056.1751938850056.1; Hm_lpvt_e0bacf12e04a7bd88ddbd9c74ef2b533=1751940360; _lxsdk_s=197e7b15b04-913-01b-545%7C%7C22; _ga_WN80P4PSY7=GS2.1.s1751938847$o1$g1$t1751940967$j60$l0$h0'
}

for i in range(1, 11):
    offset = (i - 1) * 10
    current_url = url.format(offset)
    print(current_url)
    response = requests.get(current_url, headers=headers)
    time.sleep(random.randint(1, 5))
    html = response.text
    # 解析网页
    r_list = pattern.findall(html)
    for r in r_list:
        print(f'电影名:{r[0]},导演:{r[1].strip()},上映时间:{r[2].strip()}')

    print(f'>>>>>>>>>>>>>>>>>>>>>>>>>>>{i}页数据抓取完毕<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')

"""


class MaoYanSpider:
    """Scraper for the Maoyan Top-100 movie board.

    Fetches each board page, extracts (title, star, release time) triples
    with a regex, and persists them into the MySQL table `python2511`.`maoyan`.
    """

    # Compiled once at class level instead of on every parse_html() call.
    # re.S lets '.' span newlines because each board item covers several lines.
    PATTERN = re.compile(
        '<div class="board-item-content">.*?<a .*?>(.*?)</a></p>'
        '.*?<p class="star">(.*?)</p>.*?<p class="releasetime">(.*?)</p>',
        re.S,
    )

    def __init__(self):
        # Page URL template; offset = (page - 1) * 10.
        self.url = 'https://www.maoyan.com/board/4?offset={}'
        # Browser-like headers; the Cookie is required to get past Maoyan's
        # anti-bot check (session-bound — will expire and need refreshing).
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36',
            'Cookie': '__mta=217497513.1751938850056.1751940354501.1751940360375.11; uuid_n_v=v1; uuid=90D80F705B9C11F08B10FB8CD97D095C4D54880AA20844C991EE4A86D388CA7A; _csrf=4100a2e958bd6e7ec4fdd9ade1b49e234b7152067c6edaf2dfd724149e30b654; Hm_lvt_e0bacf12e04a7bd88ddbd9c74ef2b533=1751938848; HMACCOUNT=472BD9641B52F7A9; _ga=GA1.1.1973117851.1751938848; _lx_utm=utm_source%3DBaidu%26utm_medium%3Dorganic; _lxsdk_cuid=197e7b15b02c8-04da3006ea34918-26011e51-137ca0-197e7b15b03c8; _lxsdk=90D80F705B9C11F08B10FB8CD97D095C4D54880AA20844C991EE4A86D388CA7A; __mta=217497513.1751938850056.1751938850056.1751938850056.1; Hm_lpvt_e0bacf12e04a7bd88ddbd9c74ef2b533=1751940360; _lxsdk_s=197e7b15b04-913-01b-545%7C%7C22; _ga_WN80P4PSY7=GS2.1.s1751938847$o1$g1$t1751940967$j60$l0$h0'
        }
        # NOTE(review): credentials are hard-coded; consider env vars/config.
        self.connection = pymysql.connect(host='localhost', port=3306, user='root', passwd='root', database='python2511',
                                          charset='utf8')

    def get_html(self, url):
        """Fetch one board page and hand its HTML to the parser."""
        response = requests.get(url=url, headers=self.headers)
        # Fail loudly on HTTP errors instead of parsing an error page.
        response.raise_for_status()
        self.parse_html(response.text)

    def parse_html(self, html):
        """Extract (title, star, release time) triples and persist them."""
        r_list = self.PATTERN.findall(html)
        self.save_html(r_list)

    def save_html(self, data):
        """Persist parsed rows into MySQL in one batch.

        :param data: iterable of (name, star, release_time) tuples where
            star/release_time look like '主演：...' / '上映时间：...'.
        """
        str_sql = 'INSERT INTO `python2511`.`maoyan`(`name`, `star`, `time`) VALUES (%s, %s, %s)'
        rows = []
        # 'release_time' instead of 'time' — the original local shadowed the
        # imported time module inside this method.
        for name, star, release_time in data:
            # partition('：') keeps everything after the first full-width
            # colon and yields '' (not IndexError) when the colon is missing,
            # unlike the original split('：')[1].
            rows.append((
                name,
                star.partition('：')[2].strip(),
                release_time.partition('：')[2].strip(),
            ))
        # Context manager closes the cursor; executemany + one commit replaces
        # the original per-row commit (one transaction per page).
        with self.connection.cursor() as cursor:
            cursor.executemany(str_sql, rows)
        self.connection.commit()

    def run(self):
        """Crawl all 10 board pages with a randomized delay between requests.

        Fix: the original iterated range(2, 11) and silently skipped page 1
        (the reference loop in this file's comments uses range(1, 11)).
        """
        try:
            for page in range(1, 11):
                offset = (page - 1) * 10
                self.get_html(self.url.format(offset))
                # Randomized delay to throttle request rate (politeness /
                # anti-ban).
                time.sleep(random.randint(1, 5))
                print(f'>>>>>>>>>>>>>>>>>>>>>>>>>>>{page}页数据抓取完毕<<<<<<<<<<<<<<<<<<<<<<<<<<<<<')
        finally:
            # The original leaked the connection; close it even on error.
            self.connection.close()


if __name__ == '__main__':
    # Script entry point: build the spider and crawl the whole board.
    MaoYanSpider().run()
