import aiohttp
import asyncio

from aiohttp import AsyncResolver

# Maximum number of in-flight HTTP requests allowed at once.
CONCURRENCY = 10
# Module-level semaphore shared by every fetch() call to cap concurrency.
# NOTE(review): created outside a running event loop — fine on Python 3.10+,
# where asyncio primitives bind to the loop lazily; confirm if older
# interpreters must be supported.
semaphore=asyncio.Semaphore(CONCURRENCY)

async def fetch(session: aiohttp.ClientSession, url: str):
    """Fetch *url* with *session*, bounded by the module-level semaphore.

    Returns:
        On success: ``(url, status_code, body_text)``.
        On failure: ``(url, None, exception)`` — the caller distinguishes the
        cases by the ``None`` status.
    """
    async with semaphore:
        try:
            async with session.get(url) as response:
                content = await response.text()
                print(f'{url}执行结果{type(content)}')
                return url, response.status, content
        # Bug fix: the session's ClientTimeout raises asyncio.TimeoutError,
        # which is NOT a subclass of aiohttp.ClientError — previously a
        # timed-out request escaped this handler and crashed the whole
        # gather() in the caller. Catch both failure families.
        except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
            return url, None, exc



async def todo(_urls: list):
    """Fetch every URL in *_urls* concurrently and print a per-URL summary.

    Side effects only (console output); returns ``None``.
    """
    connector = aiohttp.TCPConnector(
        # Resolve hostnames asynchronously via public DNS servers.
        resolver=AsyncResolver(nameservers=["119.29.29.29", "114.114.114.114"]),
        use_dns_cache=True,
        limit=100,  # connection-pool cap, independent of the fetch semaphore
    )

    timeout = aiohttp.ClientTimeout(
        total=20,        # budget for the whole request
        connect=15,      # pool acquisition + connection establishment
        sock_connect=5,  # TCP connect alone
    )

    async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session:
        # One task per URL, all run concurrently.
        tasks = [fetch(session, url) for url in _urls]
        # Bug fix: without return_exceptions=True a single raised exception
        # (e.g. a timeout fetch() did not catch) would propagate and discard
        # every other request's result.
        results = await asyncio.gather(*tasks, return_exceptions=True)

        # Report results.
        for result in results:
            if isinstance(result, BaseException):
                print(f"请求失败: {result!r}")
                print('=========================================================================================')
                continue
            url, status, content = result
            print(f"URL: {url}")
            print(f"状态码: {status}")
            # Bug fix: the label means "content length" but the full body (or
            # exception object) was printed; print the actual length instead.
            print(f"内容长度: {len(content) if isinstance(content, str) else content}")
            print('=========================================================================================')


if __name__ == "__main__":
    # Target URLs to request.
    target_urls = [
        "https://www.baidu.com",
        "http://172.16.41.105",
        "http://www.github.com",
        "http://www.creditmaker.com.cn",
    ]

    # Drive the event loop to completion.
    asyncio.run(todo(target_urls))