from pathlib import Path
import json

import requests
from bs4 import BeautifulSoup

# Spoof a desktop-browser User-Agent: Douban rejects requests that
# identify as python-requests (returns 418/empty pages).
headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.92 Safari/537.36'
}


def get_movie_info(start):
    """Fetch one page of the Douban Top-250 list.

    Args:
        start: Zero-based offset of the first movie on the page
            (0, 25, 50, ... — Douban paginates 25 movies per page).

    Returns:
        The raw HTML text of the page.

    Raises:
        requests.HTTPError: If the server answers with a 4xx/5xx status.
    """
    url = f'https://movie.douban.com/top250?start={start}'
    r = requests.get(url, headers=headers, timeout=10)
    # Fail fast on an error status instead of silently handing an
    # error page to the parser downstream.
    r.raise_for_status()
    return r.text

def save_image(name, url):
    """Download an image and save it as ./images/<name>.png.

    Args:
        name: Base file name (the movie title). Used verbatim — a title
            containing a path separator would fail; TODO sanitize if needed.
        url: Absolute URL of the image to download.

    Raises:
        requests.HTTPError: If the download fails with a 4xx/5xx status.
    """
    r = requests.get(url, headers=headers, timeout=10)
    # Don't write an HTML error page to disk as a ".png".
    r.raise_for_status()
    dirpath = Path('./images')
    # exist_ok=True removes the check-then-create race of the old
    # `if not dirpath.is_dir(): dirpath.mkdir(...)` pattern.
    dirpath.mkdir(parents=True, exist_ok=True)
    dirpath.joinpath(f'{name}.png').write_bytes(r.content)

def parse_movie_info(html):
    """Parse one Top-250 page and return a list of movie-info dicts.

    Also downloads each movie's poster via save_image (side effect).

    Args:
        html: HTML text of one Douban Top-250 list page.

    Returns:
        A list of dicts with keys 'img', 'title', 'background',
        'star', and 'desc'.
    """
    soup = BeautifulSoup(html, 'lxml')
    # The movie list is the <ol class="grid_view">; each movie is a <li>.
    ol = soup.find('ol', class_='grid_view')
    res = []
    for li in ol.find_all('li'):
        try:
            info = li.find('div', class_='info')
            # Movie title
            title = info.find('span', class_='title').text.strip()
            # Poster URL, then download it (side effect).
            img_url = li.find('div', class_='pic').find('img').get('src')
            save_image(title, img_url)
            # Director / cast / year blurb
            background = info.find('div', class_='bd').text.strip()
            # Rating
            star = info.find('span', class_='rating_num').text.strip()
            # BUG FIX: a few Top-250 entries have no <span class="inq">
            # quote. The old chained `.text` raised AttributeError on
            # None, and the broad except silently dropped the whole
            # movie from the results. Default to '' instead.
            inq = info.find('span', class_='inq')
            desc = inq.text.strip() if inq is not None else ''
            res.append({
                'img': img_url,
                'title': title,
                'background': background,
                'star': star,
                'desc': desc,
            })
            print(f'{title}获取完成')
        except Exception as e:
            # Best-effort scrape: log the failure and continue with
            # the next movie rather than aborting the page.
            print(e)
    return res


if __name__ == '__main__':
    data = []
    # Douban paginates 25 movies per page: start offsets 0, 25, ..., 225.
    for start in range(0, 250, 25):
        html = get_movie_info(start)
        data.extend(parse_movie_info(html))

    # BUG FIX: explicit UTF-8 is required — ensure_ascii=False emits raw
    # Chinese characters, which raises UnicodeEncodeError under a
    # non-UTF-8 platform default encoding (e.g. GBK on Windows).
    with open('movies.json', 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)



