import requests
import aiohttp
import asyncio




async def fetch(session, url):
    """Fetch *url* using the shared aiohttp *session* and return the decoded JSON body.

    Prints the URL before the request resolves and the parsed payload after,
    as lightweight progress output.

    Args:
        session: An open ``aiohttp.ClientSession`` used to issue the GET request.
        url: Fully-formed URL string to fetch.

    Returns:
        The JSON-decoded response body, so callers can collect results
        via ``asyncio.gather`` instead of only seeing printed output.
    """
    async with session.get(url) as response:
        print(url)
        response_json = await response.json()
        print(response_json)
        # Return the payload: previously it was parsed and then discarded,
        # making gather() in main() yield a list of Nones.
        return response_json

async def main():
    """Concurrently fetch the first 10 result pages of the movie API."""
    base = 'https://spa3.scrape.center/api/movie/?limit=10&offset={}'
    async with aiohttp.ClientSession() as session:
        # Offsets 0, 10, ..., 90 correspond to pages 1 through 10.
        await asyncio.gather(
            *(fetch(session, base.format(offset)) for offset in range(0, 100, 10))
        )

if __name__ == "__main__":
    # Guard the entry point so importing this module does not
    # immediately fire network requests as a side effect.
    asyncio.run(main())


