import asyncio
import aiohttp

# 定义一个协程函数来爬取网页
async def fetch(session, url):
    """Download *url* with the given aiohttp session and return the body as text."""
    # The context manager guarantees the response is released even on error.
    async with session.get(url) as resp:
        body = await resp.text()
    return body

# 主协程函数
# Main coroutine: fetch all pages concurrently and report their sizes.
async def main(urls=None):
    """Fetch *urls* concurrently and print the length of each response body.

    Args:
        urls: Iterable of URL strings to fetch. Defaults to a small demo list,
            preserving the original behavior when called with no arguments.
    """
    if urls is None:
        urls = [
            "https://www.example.com",
            "https://www.python.org",
            "https://www.github.com",
        ]
    # A single session is shared across all requests for connection pooling.
    async with aiohttp.ClientSession() as session:
        # Schedule one fetch per URL and run them concurrently.
        tasks = [fetch(session, url) for url in urls]
        htmls = await asyncio.gather(*tasks)
        for html in htmls:
            print(len(html))

if __name__ == "__main__":
    # Run the main coroutine on a fresh event loop.
    asyncio.run(main())

