import asyncio
import time

# Coroutines only switch at I/O / blocking points (await); that is where their concurrency pays off.


# async def func1():
#     print('我是func1')
#     await asyncio.sleep(1)   # 发生阻塞的前面写 await
#     print('func1结束')
#
# async def func2():
#     print('我是func2')
#     await asyncio.sleep(2)
#     print('func2结束')
#
# async def func3():
#     print('我是func3')
#     await asyncio.sleep(3)
#     print('func3结束')
#
# if __name__ == '__main__':
#     start_time = time.time()
#     f1 = func1()
#     f2 = func2()
#     f3 = func3()
#
#     # 把三个任务放一起
#     tasks = [
#         f1,
#         f2,
#         f3
#     ]
#
#     asyncio.run(asyncio.wait(tasks))
#     print(time.time() - start_time)


async def download(url, t):
    """Simulate downloading *url*, taking *t* seconds.

    The ``asyncio.sleep`` stands in for network I/O; awaiting it yields
    control to the event loop so other downloads run concurrently.

    Args:
        url: the link to "download".
        t: simulated download duration in seconds.

    Returns:
        The url, so callers can tell which download completed.
        (Fix: previously ``url`` was completely unused and the coroutine
        returned ``None``, making results unusable.)
    """
    print('我要下载了！')
    await asyncio.sleep(t)  # write ``await`` in front of the blocking point
    print('我下载完了！')
    return url

async def main(urls=None, t=3):
    """Download all urls concurrently and wait for every task to finish.

    Args:
        urls: iterable of links to download; defaults to the demo list
            below so the original no-argument call still works.
        t: simulated download time per url, in seconds (default 3, as
            in the original hard-coded value).

    Returns:
        The list of results from ``download``, in input order.
    """
    if urls is None:
        # Pretend we already scanned a batch of download links.
        urls = [
            "https://chen.com",
            "https://li.com",
            "https://mo.com",
        ]

    # create_task() schedules each coroutine on the loop immediately;
    # build the task list with a comprehension instead of a manual append loop.
    tasks = [asyncio.create_task(download(url, t)) for url in urls]

    # gather() replaces asyncio.wait(): no DeprecationWarning, results are
    # returned in order, and all tasks are awaited together.
    return await asyncio.gather(*tasks)

# Script entry point: asyncio.run() creates the event loop, drives main()
# to completion, and closes the loop afterwards.
if __name__ == '__main__':
    asyncio.run(main())

'''
Future crawler pattern:
    1. Scan and collect a batch of urls.
    2. Loop over the urls, creating one task per url.
    3. await all the tasks together.
'''


