# -*- coding: utf-8 -*-
"""
@Time    : 2024/7/11 18:21 
@Author  : ZhangShenao 
@File    : blocking_crawler.py
@Desc    : 阻塞式爬虫

asyncio库中,包含了大量操作协程相关的工具
"""
import asyncio

from bacis.decorator.perf_log_decorator import perf_log


# 'async def' declares a coroutine function: calling it builds a
# coroutine object, which only runs when awaited or scheduled.
async def crawl_page(url: str) -> None:
    """Simulate crawling one page asynchronously.

    The sleep duration is parsed from the numeric suffix of the url
    (e.g. 'url-3' sleeps 3 seconds), standing in for network latency.

    :param url: page url of the form 'url-<seconds>'
    """
    print(f'crawling {url} ...')

    # 'await' suspends this coroutine until the simulated fetch is done.
    delay = float(url.rsplit('-', 1)[-1])
    await asyncio.sleep(delay)
    print(f'finish crawling {url}')


async def crawl_pages(urls: list) -> None:
    """Crawl every page in *urls*, strictly one after another.

    Each page is awaited to completion before the next one starts, so
    total runtime is the sum of all individual delays — the "blocking"
    behaviour this demo is meant to illustrate.

    :param urls: list of page urls
    """
    for page_url in urls:
        await crawl_page(page_url)


@perf_log
def main():
    """Entry point: crawl four demo pages through a single event loop."""
    page_urls = ['url-{}'.format(n) for n in range(1, 5)]

    # asyncio.run spins up an event loop, drives the coroutine to
    # completion, and hands back its return value (None here).
    result = asyncio.run(crawl_pages(page_urls))
    print(result)


# main()

# Calling an async function directly only constructs and returns a
# coroutine object — the function body does not execute until the
# coroutine is awaited or scheduled on an event loop.
result = crawl_page('url-1')
print(result)
