# -*- coding:utf8 -*-
import json
import traceback
import os
import typing
from scrapy import Request, FormRequest
from squirrel_core.frame.spider_makaka import MakakaSpider

# Base path read once at import time from the FILE_PATH env var.
# NOTE(review): not referenced anywhere in this module — presumably consumed
# by the base spider framework; confirm before removing.
file_path = os.environ.get("FILE_PATH", "/")


class runnb_score(MakakaSpider):
    """Spider that scrapes a runner's race results from www.runnb.cn.

    Flow: race home page -> enumerate race groups -> search the runner in
    each group by race number and/or name -> fetch the runner's score
    detail page and assemble the result item.
    """

    name = "runnb_score"
    serialNumber = ""  # task serial number, echoed back in status/result payloads
    search_url = "https://www.runnb.cn/live/getLiveScoreJson.php"
    score_url = "https://www.runnb.cn/live/liveDetailNew.php"

    header = {
        "Host": "www.runnb.cn",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
        "Accept": "*/*"
    }
    # Extra headers required by the AJAX endpoints. Kept separate and merged
    # into a per-request copy instead of mutating the shared class-level
    # `header` dict (the original `self.header.update(...)` mutated shared
    # state on every loop iteration).
    ajax_header = {
        "X-Requested-With": "XMLHttpRequest",
        "Content-Type": "application/x-www-form-urlencoded",
    }

    def get_ext_requests_or_urls(self, data=None):
        """Build the initial request from the task payload.

        ``data`` may be a JSON string or a mapping carrying ``serialNumber``
        and a ``spider_config`` dict (``race_id``, ``user_id``, ``race_no``,
        ``user_name``, ``url``, ``race_name``). At least one of
        ``race_no`` / ``user_name`` must be present; otherwise the task is
        reported as failed (code 101) and the spider is closed.

        Yields a Request for the race home page on success.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            self.serialNumber = data.get("serialNumber", "")
            # Hoist the nested lookup instead of re-reading it per field.
            spider_config = data.get("spider_config", {})
            race_id = spider_config.get("race_id", "")
            user_id = spider_config.get("user_id", "")
            race_no = spider_config.get("race_no", "")
            user_name = spider_config.get("user_name", "")
            home_url = spider_config.get("url", "")
            race_name = spider_config.get("race_name", "").replace(" ", "")
            if not user_name and not race_no:
                self.logger.info("user_name和race_no必须存在一个")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 101, "message": "user_name和race_no必须存在一个"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                # The site's search box accepts either identifier; prefer the
                # race number when both are present.
                keyword = race_no or user_name
                yield Request(url=home_url, callback=self.parse, dont_filter=True, errback=self.err_parse, headers=self.header,
                              meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "race_name": race_name,
                                    "keyword": keyword, "user_name": user_name})

    def parse(self, response, **kwargs):
        """Parse the race home page.

        Reads the hidden form tokens (race id, rank, paging) and fans out
        one search POST per race group option found in the page.
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        race_name = response.meta.get("race_name", "")
        user_name = response.meta.get("user_name", "")
        keyword = response.meta.get("keyword", "")
        try:
            self.logger.info("开始查找赛事组别")
            r_id = response.xpath("//input[@id='raceid']/@value").extract_first()
            rank = response.xpath("//input[@id='rank']/@value").extract_first()
            select_page = response.xpath("//input[@id='selectPage']/@value").extract_first()
            select_pagesize = response.xpath("//input[@id='selectPagesize']/@value").extract_first()
            group_list = response.xpath("//select[@id='subjectid']/option/@value").extract()
            # Per-request header copy; do NOT mutate the shared class dict.
            ajax_headers = {**self.header, **self.ajax_header}
            for group in group_list:
                post_data = {"pageSize": select_pagesize, "currentPage": select_page, "keywords": keyword,
                             "subjectid": group, "raceid": r_id, "sex": "", "rank": rank}
                yield FormRequest(url=self.search_url, callback=self.parse_user, formdata=post_data, dont_filter=True,
                                  errback=self.err_parse, headers=ajax_headers,
                                  meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "r_id": r_id,
                                        "race_name": race_name, "group": group, "user_name": user_name})
        except Exception:
            self.logger.info(f"查找赛事组别时出错：{traceback.format_exc()}")

    def parse_user(self, response, **kwargs):
        """Parse one group's search result (JSON) and locate the runner.

        Matches only on the identifiers that were actually supplied: the
        original code required BOTH name AND number to match, which could
        never succeed when only one of them is configured (the startup
        guard only requires one). On a match, requests the score detail.
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        race_name = response.meta.get("race_name", "")
        group = response.meta.get("group", "")
        user_name = response.meta.get("user_name", "")
        r_id = response.meta.get("r_id", "")
        try:
            self.logger.info(f"开始查找{group}")
            runnerid = ""
            data_list = json.loads(response.text).get("data", [])
            for data in data_list or []:
                # Empty user_name / race_no means "not configured" -> skip
                # that criterion instead of comparing against "".
                name_ok = not user_name or user_name == data.get("name", "")
                number_ok = not race_no or race_no == data.get("number", "")
                if name_ok and number_ok:
                    runnerid = data.get("id", "")
                    break
            if runnerid:
                post_data = {"id": r_id, "runnerid": runnerid}
                yield FormRequest(url=self.score_url, callback=self.parse_score, formdata=post_data, dont_filter=True,
                                  errback=self.err_parse, headers={**self.header, **self.ajax_header},
                                  meta={"race_id": race_id, "race_no": race_no, "user_id": user_id, "race_name": race_name})
        except Exception:
            self.logger.info(f"查找{group}时出错：{traceback.format_exc()}")

    def parse_score(self, response, **kwargs):
        """Parse the score detail page and yield the assembled result item.

        The page layout (inferred from the XPaths): a summary table of
        label/value pairs plus a checkpoint table laid out in rows of six
        text cells (cp name, start time, ?, finish time, pace, ?).
        """
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_no = response.meta.get("race_no", "")
        race_name = response.meta.get("race_name", "")
        try:
            self.logger.info(f"开始解析{race_no}的成绩:{response.url}")
            score = response.xpath("//div[@id='main']/p/text()").extract_first()
            base_div = response.xpath("//div[@id='main']/div[not(@id)]")
            r_name = base_div.xpath("./h1/text()").extract_first()
            group = base_div.xpath("./p/text()").extract_first()
            base_info_list = base_div.xpath("./table[@class='disp-table']/tbody//text()").extract()
            base_info_list = [x for x in base_info_list if x.strip() != ""]
            # Cells alternate value, label, value, label, ... in the page.
            title_list = base_info_list[1::2]
            value_list = base_info_list[0::2]
            base_dict = dict(zip(title_list, value_list))
            section_list = base_div.xpath("./table[@class='disp-table']/table[@class='layui-table']/tr//text()").extract()
            section_list = [x for x in section_list if x.strip() != ""]
            section_list = [section_list[i:i + 6] for i in range(0, len(section_list), 6)]
            score_info = []
            for row in section_list:
                if len(row) < 5:
                    # A short trailing chunk would raise IndexError and drop
                    # the whole item via the broad except below.
                    continue
                score_info.append({"cp_name": row[0], "start_time": row[1], "score": row[3],
                                   "distance": "", "pace": row[4]})
            if score_info:
                # The last checkpoint row is the finish line.
                score_info[-1]["cp_name"] = "FINISH"
            score_data = {"race_name": r_name or race_name, "itemName": group, "index": base_dict.get("枪声成绩排名(全部)", ""),
                          "name": base_dict.get("姓名", ""), "raceNo": base_dict.get("参赛号", race_no),
                          "shotScore": base_dict.get("枪声成绩", ""), "distance": "",
                          "score_status": "", "score": score, "sectionScore": score_info}
            result = self.result_item_assembler(response)
            # str() (dict repr) kept deliberately — downstream consumer
            # appears to expect this format; do not switch to json.dumps
            # without checking the consumer.
            result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                     "crawlerType": self.name_second, "data": str(score_data)}
            yield result
        except Exception:
            self.logger.info(f"查找{race_no}的成绩时出错{response.url}：{traceback.format_exc()}")

    def err_parse(self, failure):
        """Errback for all requests: log the URL and the failure reason.

        ``traceback.format_exc()`` is useless in a Scrapy errback (there is
        no active exception, so it logs "NoneType: None"); use the Twisted
        Failure's own error message instead.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{failure.getErrorMessage()}")

