from typing import AsyncIterator, Any

import scrapy


class Game520Spider(scrapy.Spider):
    """Crawl the paginated listing pages of gamer520.com.

    Requests pages 1 through 885 of the site's listing and hands each
    response to :meth:`parse` for item extraction.
    """

    name = "game520"

    async def start(self) -> AsyncIterator[Any]:
        """Yield one request per listing page (Scrapy 2.13+ async entry point).

        Yields:
            scrapy.Request: a request for each page in 1..885.
        """
        # Iterate directly instead of materializing an 885-element URL list.
        for page in range(1, 886):
            yield scrapy.Request(f'https://www.gamer520.com/page/{page}')

    def parse(self, response):
        """Extract item nodes from one listing page.

        Args:
            response: the downloaded listing-page response.

        NOTE(review): the original called ``response.xpath()`` with no
        query, which raises TypeError at runtime. The selector below is
        a placeholder so the callback at least runs without crashing.
        """
        # TODO(review): replace with the real item selector for this site
        # and yield items/follow-up requests from the matched nodes.
        items = response.xpath('//article')
