import scrapy
import csv
import time
# Number of listing pages to request from the API (pageIndex 1..pages).
pages = 8
# Good/product identifier embedded in the API URL and the output CSV filename.
# NOTE(review): 'id' shadows the builtin id(); consider renaming (e.g. GOOD_ID)
# together with its uses inside CardWorldSpider.
id = 'GT461505C'


class CardWorldSpider(scrapy.Spider):
    """Scrape card listings from the ka-world.com API and append them to a CSV.

    Requests `pages` pages of the good identified by the module-level `id`
    and writes one CSV row per card into ``card_world_<id>.csv``.
    """

    name = 'ka_lai'

    # Throttle via Scrapy's scheduler. The original time.sleep(1) in
    # start_requests blocks the Twisted reactor and stalls the whole crawl;
    # DOWNLOAD_DELAY achieves the same pacing without blocking.
    custom_settings = {'DOWNLOAD_DELAY': 1}

    def start_requests(self):
        """Yield one JSON request per result page (pageIndex 1..pages)."""
        url_tpl = 'https://page.ssl1.ka-world.com/api/v2.1/good/{}/noList?pageIndex={}&pageSize=30'
        for page in range(1, pages + 1):
            yield scrapy.http.JsonRequest(
                url=url_tpl.format(id, page), callback=self.parse
            )

    def parse(self, response, **kwargs):
        """Extract card fields from the JSON payload and append them to the CSV.

        Missing keys default to '' so a partial record never aborts the page;
        a payload without 'list' simply produces no rows.
        """
        kami_list = response.json().get('list', [])

        rows = [
            (
                kami.get('column4', ''),           # set name
                kami.get('column3', ''),           # card number
                kami.get('column2_English', ''),   # player name (English)
                kami.get('column1_English', ''),   # team (English)
                kami.get('column5', ''),           # sequence / serial
                # NOTE(review): Chinese player/team use column1/column2 while
                # English uses column2_English/column1_English — the swapped
                # indices look suspicious; confirm against the API schema.
                kami.get('column1', ''),           # player name (Chinese)
                kami.get('column2', ''),           # team (Chinese)
                '是', '是', '是',
            )
            for kami in kami_list
        ]

        # encoding='utf-8' so the Chinese columns survive regardless of the
        # platform's locale default; append mode because every page's parse
        # callback writes into the same file.
        with open('card_world_{}.csv'.format(id), 'a', newline='', encoding='utf-8') as f:
            csv.writer(f).writerows(rows)

