# -*- coding:utf8 -*-
import json
import traceback
import os
import typing
from scrapy import Request
from squirrel_core.frame.spider_makaka import MakakaSpider

file_path = os.environ.get("FILE_PATH", "/")


class kunyue_score(MakakaSpider):
    """Spider that queries a runner's race result from app.queenrun.cn
    (昆越/QueenRun mini-program API) and uploads it as a result item.

    Task payload (``data``) is a JSON string or mapping carrying
    ``serialNumber`` plus a ``spider_config`` dict with race/user fields;
    ``card_num`` (certificate number) is mandatory.
    """
    name = "kunyue_score"
    serialNumber = ""  # task serial number, copied from the incoming payload
    header = {
        "Host": "app.queenrun.cn",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) NetType/WIFI MiniProgramEnv/Windows WindowsWechat/WMPF WindowsWechat(0x63090a13) UnifiedPCWindowsWechat(0xf2541022) XWEB/16467",
        "Accept": "*/*",
        "Content-Type": "application/json;charset=UTF-8"
    }

    def get_ext_requests_or_urls(self, data=None):
        """Validate the task payload and yield the initial result-query request.

        :param data: task payload — JSON string or mapping (see class docstring).
        :yields: a single ``scrapy.Request`` to the result-info endpoint, or
                 nothing when ``card_num`` is missing (spider is shut down).
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            self.serialNumber = data.get("serialNumber", "")
            # Hoist the nested config dict instead of re-fetching it per field.
            cfg = data.get("spider_config", {})
            race_id = cfg.get("race_id", "")
            user_id = cfg.get("user_id", "")
            race_no = cfg.get("race_no", "")
            user_name = cfg.get("user_name", "")
            card_num = cfg.get("card_num", "")
            race_name = cfg.get("race_name", "").replace(" ", "")
            if not card_num:
                # Certificate number is required by the API; report failure
                # (code 203) and stop the spider.
                self.logger.info("card_num必须存在")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 203, "message": "card_num必须存在"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                # NOTE(review): competitionName is hard-coded to one specific
                # 2025 event — confirm whether it should come from spider_config.
                url = f"https://app.queenrun.cn/api/v2/race/result/info?competitionName=新日2025锡山宛山湖马拉松&competitionNumber={race_no}&certificateNumber={card_num}"
                yield Request(url=url, callback=self.parse, dont_filter=True, errback=self.err_parse, headers=self.header,
                              meta={"race_id": race_id, "user_id": user_id, "user_name": user_name,
                                    "race_name": race_name, "card_num": card_num, "race_no": race_no})

    def parse(self, response, **kwargs):
        """Parse the result-info response and yield one result item per
        matching race entry, or a "not found" item when the list is empty.
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        user_name = response.meta.get("user_name", "")
        race_no = response.meta.get("race_no", "")
        card_num = response.meta.get("card_num", "")
        try:
            self.logger.info(f"获取{user_name}的成绩信息")
            # `or {}` guards against `"data": null`, which previously raised
            # inside the try and silently produced no item at all.
            content = json.loads(response.text).get("data") or {}
            data_list = content.get("results", [])
            if data_list:
                for data in data_list:
                    # Only emit entries whose competition name matches the task's race.
                    if race_name in data.get("competitionName", ""):
                        # NOTE(review): "name" reuses competitionName (same as
                        # race_name) but falls back to user_name — looks like a
                        # copy-paste slip; confirm the correct API field for the
                        # runner's name before changing behavior.
                        score_data = {"race_name": data.get("competitionName", race_name), "itemName": data.get("groupName", ""),
                                      "name": data.get("competitionName", user_name), "raceNo": data.get("competitionNumber", race_no),
                                      "pace": data.get("averagePace", ""), "speed": data.get("speedAll", ""),
                                      "distance": "", "shotScore": data.get("gunshotScoreAll", ""),
                                      "score": data.get("netScoreAll", ""), "origin_data": content}
                        result = self.result_item_assembler(response)
                        result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                                 "crawlerType": self.name_second, "data": str(score_data)}
                        yield result
            else:
                # No results for this certificate number — still emit an item
                # so the downstream consumer knows the lookup completed.
                result = self.result_item_assembler(response)
                result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                         "crawlerType": self.name_second, "data": str({"msg": "未查到相关成绩信息"})}
                yield result
        except Exception:
            self.logger.info(f"获取{user_name}的成绩信息错误：{traceback.format_exc()}")

    def err_parse(self, failure):
        """Errback for failed requests.

        Bug fix: the original called ``traceback.format_exc()`` here, but an
        errback is not an active ``except`` block, so it always logged
        "NoneType: None". Use the Twisted Failure's own error message instead.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{failure.getErrorMessage()}")

