from typing import AsyncIterator, Any

import scrapy


class QuotesAllPageSpider(scrapy.Spider):
    """Demo spider: fire ten identical GET requests at httpbin and print
    the JSON body of each response.

    httpbin simply echoes the request back, so every response is the same;
    this spider exercises request scheduling, not real scraping.
    """

    name = "quotes_allpage"

    async def start(self):
        """Seed the crawl with ten identical requests plus one inline item.

        Scrapy's default dupefilter drops any request whose URL has already
        been seen, so without ``dont_filter=True`` only ONE of the ten
        identical requests would ever reach ``parse`` — the other nine
        would be silently filtered.

        Yielding a plain dict from ``start()`` (Scrapy >= 2.13) hands it
        straight to the item pipelines without an HTTP round-trip.
        """
        url = "https://httpbin.org/get?age=10"
        for _ in range(10):
            # dont_filter=True bypasses the duplicate-URL filter so all
            # ten requests are actually scheduled.
            yield scrapy.Request(url=url, dont_filter=True)
        yield {'abc': '1'}

    def parse(self, response):
        """Print the decoded JSON payload of each httpbin response."""
        print(response.json())

