'''
Author: wangyuxi yuxi007008@gmail.com
Date: 2023-11-09 14:43:49
LastEditors: wangyuxi yuxi007008@gmail.com
LastEditTime: 2024-01-05 17:47:49
FilePath: \cardmi\cardmi\spiders\hs.py
Description: 

Copyright (c) 2023 by wangyuxi <yuxi007008@gmail.com>, All Rights Reserved.
'''
import csv

import scrapy

# Number of listing pages to request from the goods endpoint.
pages = 4
# Group/goods listing id to scrape; also embedded in the output CSV filename.
# NOTE(review): shadows the builtin ``id`` — consider renaming (e.g. GROUP_ID).
id = 145855


class KamiSpider(scrapy.Spider):
    """Scrape card ("kami") listings from the hobbystocks mobile API.

    Posts one JSON request per page for group ``id`` and appends one CSV
    row per card to ``hs_<id>.csv`` (GB18030-encoded so Chinese text opens
    cleanly in zh-CN Excel).
    """

    name = 'hs'

    def start_requests(self):
        """Yield one JSON POST per listing page (1..pages)."""
        url = 'https://m2.hobbystocks.cn/py-app/api/v2.1/group/goods'

        for page_no in range(1, pages + 1):
            # Payload mirrors the mobile app's API call; most fields are
            # constant boilerplate the endpoint expects.
            body = {
                "appid": "wx22a7349c29e688d4",
                "data": {
                    "groupInfoId": id
                },
                "departId": 103,
                "openid": "",
                "pageNo": page_no,
                "pageSize": 20,
                "smsRegisterId": "101d8559085d91ef25a",
                "timestamp": 1685670434430,
                "unionid": "",
                "userId": "",
                "userType": "THIRD_APP",
                "version": "2.0.59"
            }

            yield scrapy.http.JsonRequest(url=url, data=body, callback=self.parse)

    def parse(self, response, **kwargs):
        """Extract card rows from one listing page and append them to the CSV.

        Raises KeyError if the response JSON lacks the expected structure,
        surfacing API changes instead of silently writing bad data.
        """
        kami_list = response.json()['data']['pageGoods']['list']

        # Trailing '是' ("yes") columns are fixed flags expected by the
        # downstream spreadsheet consumer.
        rows = [
            (
                kami['setName'],
                kami['num'],
                kami['playerName'],
                kami['team'],
                kami['seq'],
                kami['playerName_zn'],
                kami['team_zn'],
                '是', '是', '是',
            )
            for kami in kami_list
        ]

        # 'a' (append, write-only) instead of 'a+': nothing is read back.
        with open('hs_{}.csv'.format(id), 'a', newline='', encoding='gb18030') as f:
            csv.writer(f).writerows(rows)
