from typing import AsyncIterator, Any

import scrapy


class QuchongSpider(scrapy.Spider):
    """Demo spider illustrating Scrapy's request de-duplication.

    Scrapy filters duplicate requests by default using a request
    fingerprint (a hash of the request stored in a "seen" set);
    passing ``dont_filter=True`` bypasses that filter per request.
    """

    name = "quchong"

    # allowed_domains = ["quotes.toscrape.com"]
    # start_urls = ["http://httpbin.org/get?age=10" for i in range(2)]  # deduplicated by default

    async def start(self):
        """Yield two identical requests (dedup bypassed) and one item."""
        # The same URL twice: without dont_filter=True the second request
        # would be dropped by the duplicate filter.
        for _ in range(2):
            yield scrapy.Request(
                "http://httpbin.org/get?age=10",
                dont_filter=True,  # skip the duplicate filter for this request
            )
        yield {"sss": 74154}

    def parse(self, response):
        """Print the JSON response body and emit a simple item."""
        print(response.json())
        yield {"sds": 1212}
