import asyncio
import aiohttp
from bs4 import BeautifulSoup

async def fetch(url, *, ssl=False):
    """GET *url* and return the decoded response body.

    Args:
        url: Absolute URL to fetch.
        ssl: Passed to ``aiohttp.TCPConnector``.  Defaults to ``False``
            (certificate verification disabled) to preserve the original
            behavior — NOTE(review): this is insecure; callers should pass
            ``ssl=True`` unless they have a specific reason not to.

    Returns:
        The response body as ``str``.

    Raises:
        aiohttp.ClientResponseError: if the server answers with a 4xx/5xx
            status (previously the error page's HTML was returned silently,
            making failures indistinguishable from empty result pages).
    """
    connector = aiohttp.TCPConnector(ssl=ssl)
    # A fresh session per request is wasteful but keeps this helper
    # self-contained; the session is closed by the context manager.
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as response:
            response.raise_for_status()
            return await response.text()

async def main():
    """Concurrently fetch the first 10 cnblogs list pages and print post links."""
    # BUG FIX: the original URLs were "https://www.cnblogs.com/#p{page}".
    # Everything after '#' is a URL fragment, which is handled client-side
    # and never sent to the server — so all 10 requests fetched the same
    # front page.  Use the server-side pagination path instead.
    urls = [
        f"https://www.cnblogs.com/sitehome/p/{page}"
        for page in range(1, 10 + 1)
    ]
    tasks = [fetch(url) for url in urls]
    results = await asyncio.gather(*tasks)

    for result in results:
        pages = parser(result)
        for page in pages:
            print(page)

def parser(html):
    """Extract post links from a cnblogs list page.

    Args:
        html: Raw HTML of a list page.

    Returns:
        A list of ``(href, title_text)`` tuples, one per anchor whose
        CSS class is ``post-item-title``.
    """
    soup = BeautifulSoup(html, 'html.parser')
    # find_all(name, class): anchors carrying the post URL and title.
    post_anchors = soup.find_all('a', 'post-item-title')
    entries = []
    for anchor in post_anchors:
        entries.append((anchor['href'], anchor.get_text()))
    return entries



if __name__ == '__main__':
    # Script entry point: drive the async crawl to completion on a fresh
    # event loop.
    asyncio.run(main())
