from typing import AsyncIterator, Any

import scrapy


class Quotes1Spider(scrapy.Spider):
    """Demo spider that issues duplicate GET requests against httpbin.

    Yields the same URL twice with deduplication disabled
    (``dont_filter=True``) and prints the JSON body of each response.
    """

    name = "quotes1"

    async def start(self) -> AsyncIterator[Any]:
        """Yield the initial requests (Scrapy >= 2.13 async entry point).

        Issues the same URL twice; ``dont_filter=True`` bypasses the
        duplicate-request filter so both requests are actually scheduled.
        """
        # Loop-invariant URL hoisted; plain string (no placeholders needed).
        url = "https://httpbin.org/get?a=100"
        for _ in range(2):
            yield scrapy.Request(url=url, dont_filter=True)

    def parse(self, response):
        """Print the decoded JSON payload of *response*."""
        print(response.json())
