'''
# 异步IO库： asyncio
import asyncio

# 获取事件循环
loop = asyncio.get_event_loop()

# 定义协程
async def myfunc(url):
    await get_url(url)

# 创建task列表
tasks = [loop.create_task(myfunc(url)) for url in urls]

# 执行爬虫任务列表
loop.run_until_complete(asyncio.wait(tasks))

# ---- 注意 --------
# 要用在异步IO编程中，依赖的库必须支持异步IO特性
# 爬虫应用中： requests 不支持异步，需要用 aiohttp
'''

import asyncio
import aiohttp

async def async_craw(url):
    """Fetch *url* with aiohttp and print the length of the response body.

    Parameters
    ----------
    url : str
        The URL to fetch.

    Returns
    -------
    str
        The decoded response body. Returning it is backward compatible:
        the original implicitly returned None, which no caller could
        have usefully depended on.
    """
    print('craw url: ', url)
    # NOTE(review): opening a new ClientSession per URL works, but when
    # crawling many pages a single shared session would reuse connections;
    # left as-is to keep the coroutine self-contained.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            result = await response.text()
            print(f'craw url: {url}, {len(result)}')
            return result


# ---------------------------------------------------------------------------
# Script entry: crawl 50 Baidu pages concurrently and time the whole run.
# asyncio.run() replaces the deprecated get_event_loop() /
# run_until_complete() pattern (DeprecationWarning since Python 3.10):
# it creates, runs, and closes a fresh event loop for us.
# ---------------------------------------------------------------------------
import time

urls = [f"https://www.baidu.com/#p{page}" for page in range(1, 50 + 1)]


async def _crawl_all():
    """Run one async_craw coroutine per URL and wait for all to finish."""
    # gather() schedules each coroutine on the running loop, so there is
    # no need to hand-create Task objects before the loop starts.
    await asyncio.gather(*(async_craw(url) for url in urls))


start = time.time()
asyncio.run(_crawl_all())
end = time.time()
print("use time cost: ", end - start, " seconds.")