import concurrent
import concurrent.futures
import requests
import asyncio
import aiohttp

# for page in range(1, 1000):
#     url = f'https://randomuser.me/api/?page={page}&results=20&seed=abc'
#     response = requests.get(url)
#     data = response.json()
#     for i in data["results"]:
#         print(i['name'])


# async def fetch(session, url):
#     async with session.get(url) as response:
#         data = await response.json()
#         for i in data["results"]:
#             print(i['name'])
#
#
# async def main():
#     async with aiohttp.ClientSession() as session:
#         urls = [f'https://randomuser.me/api/?page={page}&results=20' for page in range(1, 100)]
#         for i in range(0,len(urls),10):
#             batch = urls[i:i+10]
#             tasks = [fetch(session, url) for url in batch]
#             await asyncio.gather(*tasks)
#             await asyncio.sleep(1)
#
#
# asyncio.run(main())


# Pages 1..99 of the randomuser API, 20 results per page.
BASE_URL = 'https://randomuser.me/api/?page={}&results=20'
urls = [BASE_URL.format(page_number) for page_number in range(1, 100)]


def fetch(url, timeout=10):
    """Fetch one randomuser API page and print each result's name dict.

    Args:
        url: Full API URL to request.
        timeout: Seconds before the request is aborted (default 10).
            Without a timeout, requests can hang a worker thread forever.

    Raises:
        requests.HTTPError: if the server returns a 4xx/5xx status.
        requests.RequestException: on connection/timeout failures.
    """
    resp = requests.get(url, timeout=timeout)
    # Fail loudly on HTTP errors instead of hitting an opaque
    # KeyError/JSONDecodeError on an error body below.
    resp.raise_for_status()
    data = resp.json()
    for i in data["results"]:
        print(i['name'])


# Fan out the fetches across 5 worker threads (I/O-bound, so threads help).
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    futures = [executor.submit(fetch, url) for url in urls]
    # Discarding the futures would silently swallow any exception raised
    # inside fetch(); calling .result() re-raises it here so failures are
    # visible instead of vanishing in the pool.
    for future in concurrent.futures.as_completed(futures):
        future.result()
