import aiohttp
import  asyncio
from aiohttp import  ClientSession
from bs4 import  BeautifulSoup

async def fetch(url: str, session: ClientSession) -> str:
    """Download *url* using the shared *session* and return the body as text.

    Args:
        url: Absolute URL of the page to fetch.
        session: An open aiohttp ClientSession (owned by the caller).

    Returns:
        The decoded response body.

    Raises:
        aiohttp.ClientResponseError: if the server answers with a 4xx/5xx
            status, so callers never parse an error page as real content.
    """
    async with session.get(url) as response:
        # Fail fast on HTTP errors instead of silently returning the
        # error page's HTML to the parser.
        response.raise_for_status()
        return await response.text()


def parse_html(html: str) -> list:
    """Extract movie titles and ratings from one Douban Top-250 page.

    Args:
        html: Raw HTML of a single result page.

    Returns:
        A list of ``{'title': ..., 'rating': ...}`` dicts, one per movie,
        in page order. Entries missing either span are skipped.
    """
    soup = BeautifulSoup(html, 'lxml')
    movies_data = []
    for item in soup.find_all('div', class_='item'):
        title_tag = item.find('span', class_='title')
        rating_tag = item.find('span', class_='rating_num')
        # find() returns None when the span is absent; without this guard
        # a malformed entry would raise AttributeError on `.text`.
        if title_tag is None or rating_tag is None:
            continue
        movies_data.append({
            'title': title_tag.text.strip(),
            'rating': rating_tag.text.strip(),
        })
    return movies_data


async def fetch_all(urls: list):
    """Concurrently download every URL in *urls* and return the HTML bodies.

    A single ClientSession is shared by all requests so the connection pool
    is reused; results come back in the same order as *urls*.
    """
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(fetch(url, session) for url in urls))


async def main():
    """Scrape all ten pages of the Douban Top 250 list and print each movie."""
    base_url = 'https://movie.douban.com/top250'
    # Douban paginates 25 movies per page via the `start` query parameter.
    page_urls = [f'{base_url}?start={offset}' for offset in range(0, 250, 25)]

    pages = await fetch_all(page_urls)

    collected = []
    for page in pages:
        collected.extend(parse_html(page))

    for movie in collected:
        print(f"电影：{movie['title']},评分：{movie['rating']}")

if __name__ == '__main__':
    # Script entry point: asyncio.run() creates an event loop, runs main()
    # to completion, and closes the loop on exit.
    asyncio.run(main())