# -*- coding: utf-8 -*-
# @Time    : 2024/1/23 21:24
# @Author  : micah
# @File    : 3.异步并发.py
# @Software: PyCharm

import asyncio
import aiohttp


async def baidu_spider(session=None):
    """Fetch the Baidu homepage and return its HTML body as text.

    Args:
        session: Optional shared ``aiohttp.ClientSession``. When ``None``
            (the default, preserving the original zero-argument call), a
            temporary session is created just for this request. Passing a
            shared session is the aiohttp-recommended pattern, since
            per-request sessions forgo connection pooling.

    Returns:
        The decoded response body (``str``).
    """
    print("---百度蜘蛛---")
    url = "https://www.baidu.com"
    if session is not None:
        # Caller owns the session lifecycle; only perform the request.
        async with session.get(url) as r:
            return await r.text()
    # Fallback: original behavior — one short-lived session per call.
    async with aiohttp.ClientSession() as owned_session:
        async with owned_session.get(url) as r:
            return await r.text()


async def sogou_spider(session=None):
    """Fetch the Sogou homepage and return its HTML body as text.

    Args:
        session: Optional shared ``aiohttp.ClientSession``. When ``None``
            (the default, preserving the original zero-argument call), a
            temporary session is created just for this request. Passing a
            shared session is the aiohttp-recommended pattern, since
            per-request sessions forgo connection pooling.

    Returns:
        The decoded response body (``str``).
    """
    print("---搜狗蜘蛛---")
    url = "https://www.sogou.com"
    if session is not None:
        # Caller owns the session lifecycle; only perform the request.
        async with session.get(url) as r:
            return await r.text()
    # Fallback: original behavior — one short-lived session per call.
    async with aiohttp.ClientSession() as owned_session:
        async with owned_session.get(url) as r:
            return await r.text()


async def jingdong_spider(session=None):
    """Fetch the JD.com homepage and return its HTML body as text.

    Args:
        session: Optional shared ``aiohttp.ClientSession``. When ``None``
            (the default, preserving the original zero-argument call), a
            temporary session is created just for this request. Passing a
            shared session is the aiohttp-recommended pattern, since
            per-request sessions forgo connection pooling.

    Returns:
        The decoded response body (``str``).
    """
    print("---京东蜘蛛---")
    url = "https://www.jd.com"
    if session is not None:
        # Caller owns the session lifecycle; only perform the request.
        async with session.get(url) as r:
            return await r.text()
    # Fallback: original behavior — one short-lived session per call.
    async with aiohttp.ClientSession() as owned_session:
        async with owned_session.get(url) as r:
            return await r.text()


async def main():
    """Run the three spiders concurrently and print each fetched page.

    ``asyncio.gather`` schedules every coroutine at once, so the three
    HTTP requests overlap instead of running back-to-back; results come
    back in the same order the coroutines were listed.
    """
    spiders = (baidu_spider, sogou_spider, jingdong_spider)
    pages = await asyncio.gather(*(spider() for spider in spiders))
    for page in pages:
        print(f'返回的内容为: {page}')


if __name__ == '__main__':
    # Script entry point: asyncio.run creates a fresh event loop,
    # drives main() to completion, and closes the loop on exit.
    asyncio.run(main())
