import asyncio
import time
import aiohttp
# from single_thread import urls, headers
from bs4 import BeautifulSoup as bs
import uvloop


# Work list for the load test: the same URL repeated 100 times.
urls = ['https://www.baidu.com' for _ in range(100)]

# semaphore = asyncio.Semaphore(20)  # concurrency cap (disabled)


async def save(data):
    """Append *data* (response body text) to a.json.

    NOTE(review): open()/write() are blocking calls, so this briefly
    stalls the event loop. Acceptable for small bodies; offload to a
    thread executor for heavy I/O.
    """
    # Explicit encoding — the platform default is not UTF-8 everywhere,
    # and response text may contain non-ASCII characters.
    with open('a.json', 'a', encoding='utf-8') as f:
        f.write(data)

async def crawl(url, session=None):
    """GET *url* and append the response body to a.json via save().

    Args:
        url: URL to fetch.
        session: optional shared aiohttp.ClientSession. aiohttp
            recommends one session for many requests; the per-call
            session below is kept only for backward compatibility,
            so fan-out callers should pass a shared session.
    """
    owns_session = session is None
    if owns_session:
        # Fallback: own a short-lived session (original behavior).
        session = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(limit=64, ssl=False))
    try:
        async with session.get(url) as resp:
            data = await resp.text()
            await save(data)
    finally:
        # Only close what we created; never close a caller's session.
        if owns_session:
            await session.close()
 

async def _main():
    """Crawl every URL concurrently; gather() propagates any failure."""
    await asyncio.gather(*(crawl(url) for url in urls))


if __name__ == '__main__':

    start = time.time()

    # uvloop swaps in a faster event-loop implementation;
    # asyncio.run() picks it up through the policy.
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

    # asyncio.run() replaces the get_event_loop() / run_until_complete()
    # / close() sequence, which is deprecated since Python 3.10.
    # asyncio.wait(tasks) also silently discarded task exceptions;
    # gather() re-raises them.
    asyncio.run(_main())

    print(time.time() - start)   # wall time is unstable (network-bound)
