#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
async_crawler_demo.py
目标：并发抓取 Top 250 电影标题，用信号量限流 10，超时 10s，自动重试 3 次。
运行：python async_crawler_demo.py
环境：Python≥3.8
依赖：pip install aiohttp aiofiles tqdm beautifulsoup4 lxml
"""
import asyncio, aiohttp, aiofiles, sys
from tqdm.asyncio import tqdm_asyncio

# Listing URL template; `start` is the zero-based offset of the first item on the page.
START_URL = "https://movie.douban.com/top250?start={}"
MAX_PAGE = 10  # 25 entries per page, 250 in total
CONCURRENCY = 10  # max simultaneous requests, enforced via a semaphore in fetch()
TIMEOUT = aiohttp.ClientTimeout(total=10, sock_connect=5)  # per-request deadline

# Identify the crawler politely; declares a bot UA with a contact URL.
headers = {
    "User-Agent": "Mozilla/5.0 (compatible; AsyncCrawler/1.0; +https://example.com/bot)"
}


async def fetch(session, url, semaphore: asyncio.Semaphore, max_retries: int = 3):
    """Fetch *url* with concurrency limiting and exponential-backoff retries.

    Args:
        session: aiohttp.ClientSession used to issue the GET request.
        url: page URL to download.
        semaphore: caps the number of in-flight requests.
        max_retries: attempts before giving up (default 3, as before).

    Returns:
        The response body as text, or ``None`` if every attempt failed.
    """
    for attempt in range(1, max_retries + 1):
        try:
            # Hold the semaphore only while the request is actually in flight,
            # so a backoff sleep does not occupy a concurrency slot.
            async with semaphore:
                async with session.get(url, headers=headers, timeout=TIMEOUT) as resp:
                    # Turn 4xx/5xx responses into exceptions so they get retried too.
                    resp.raise_for_status()
                    return await resp.text()
        except Exception as e:
            if attempt == max_retries:
                print(f"[WARN] 放弃 {url} : {e}")
                return None
            # Exponential backoff: 1s, 2s, 4s, ...
            await asyncio.sleep(2**attempt * 0.5)


async def parse(html: str):
    """Minimal parser: extract the 25 primary movie titles from one listing page."""
    from bs4 import BeautifulSoup

    document = BeautifulSoup(html, "lxml")
    titles = []
    # Only the first .title span per entry holds the primary (Chinese) title.
    for node in document.select(".title:nth-child(1)"):
        titles.append(node.get_text(strip=True))
    return titles


async def worker(session, semaphore, pbar):
    """Fan out one fetch task per listing page and append parsed titles to titles.txt.

    Args:
        session: shared aiohttp.ClientSession.
        semaphore: concurrency limiter passed through to fetch().
        pbar: progress bar advanced by the number of titles saved.
    """
    tasks = [
        asyncio.create_task(fetch(session, START_URL.format(start), semaphore))
        for start in range(0, MAX_PAGE * 25, 25)
    ]
    # Open the output file once for the whole crawl; the original re-opened it
    # for every completed page. Append mode is kept so reruns keep prior data.
    async with aiofiles.open("titles.txt", "a", encoding="utf-8") as f:
        async for coro in tqdm_asyncio.as_completed(tasks, desc="Crawl"):
            html = await coro
            if not html:
                continue  # fetch gave up after its retries
            titles = await parse(html)
            pbar.update(len(titles))
            await f.write("\n".join(titles) + "\n")


async def main():
    """Entry coroutine: build the shared session/connector and run the crawl."""
    semaphore = asyncio.Semaphore(CONCURRENCY)
    pbar = tqdm_asyncio(total=MAX_PAGE * 25, desc="Saved")
    connector = aiohttp.TCPConnector(
        limit=100,  # total connection pool
        limit_per_host=30,  # per-host cap
        ttl_dns_cache=300,
        use_dns_cache=True,
        # SECURITY: ssl=False disables certificate verification; acceptable only
        # for self-signed test hosts, never against production endpoints.
        ssl=False,
    )
    try:
        async with aiohttp.ClientSession(connector=connector) as session:
            await worker(session, semaphore, pbar)
    finally:
        # Close the bar even if the crawl raises, so the terminal is restored.
        pbar.close()


if __name__ == "__main__":
    # Windows compatibility: force the selector-based event loop policy.
    # NOTE(review): presumably to avoid known Proactor-loop incompatibilities
    # with aiohttp on Windows — confirm against the aiohttp version in use.
    if sys.platform.startswith("win"):
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    asyncio.run(main())
