# -*- coding:utf8 -*-
import json
import traceback
import os
import re
import typing
from scrapy import Request
from urllib.parse import urlparse
from squirrel_core.commons.utils.tools import calc_str_md5, upload_file
from squirrel_core.frame.spider_makaka import MakakaSpider

# Base directory for locally saved artifacts (certificate images);
# overridable through the FILE_PATH environment variable.
file_path = os.environ.get("FILE_PATH", "/")


class swordfish_score(MakakaSpider):
    """Spider that looks up marathon/off-road race scores and certificate
    images from the china-swordfish query services and yields them as
    result items tagged with the task's serial number.
    """

    name = "swordfish_score"
    # Task serial number taken from the payload; echoed back in every result.
    serialNumber = ""
    # Score-API host, chosen from the domain of the configured home URL.
    host = ""

    def _make_result(self, response, data_str):
        """Assemble one result item wrapping *data_str* with task identity fields."""
        result = self.result_item_assembler(response)
        result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                 "crawlerType": self.name_second, "data": data_str}
        return result

    def get_ext_requests_or_urls(self, data=None):
        """Validate the task payload and yield the initial request.

        *data* may be a JSON string or a mapping with ``serialNumber`` and a
        ``spider_config`` dict (race_id, user_id, race_no, user_name,
        card_num, race_name, url). At least one of user_name / race_no is
        required; otherwise the task is rejected with procedure code 101.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            self.serialNumber = data.get("serialNumber", "")
            config = data.get("spider_config", {})  # read the sub-dict once
            race_id = config.get("race_id", "")
            user_id = config.get("user_id", "")
            race_no = config.get("race_no", "")
            user_name = config.get("user_name", "")
            card_num = config.get("card_num", "")
            race_name = config.get("race_name", "").replace(" ", "")
            home_url = config.get("url", "")
            if not user_name and not race_no:
                self.logger.info("user_name和race_no必须存在一个")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 101, "message": "user_name和race_no必须存在一个"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                if "2025横店马拉松" in race_name:
                    # This race is served by a dedicated certificate endpoint.
                    url = f"http://8.141.3.200:6013/api/My/GetOss/{card_num}/91"
                    yield Request(url=url, callback=self.parse_hdian, dont_filter=True, errback=self.err_parse,
                                  meta={"race_id": race_id, "user_id": user_id, "race_no": race_no,
                                        "race_name": race_name, "user_name": user_name})
                else:
                    keyword = race_no or user_name
                    domain = urlparse(home_url).netloc
                    # BUGFIX: initialize num so that an unmatched domain cannot
                    # leave it unbound (NameError) when self.host is still set
                    # from a previous payload.
                    num = ""
                    if domain == "offroad.china-swordfish.com":
                        num = "".join(re.findall(r"offroadorder/(\d+)", home_url)).strip()
                        self.host = "8.141.3.200:6060"
                    if domain in ["queryinfo.china-swordfish.com", "sfs.china-swordfish.com"]:
                        self.host = "39.105.11.100:6031"
                        num = "".join(re.findall(r"(?:sfsqueryinfoimg|querycertsingle|sfsqueryscore)/(\d+)", home_url)).strip()
                    if self.host:
                        url = f"http://{self.host}/api/OffRoad/GetCertInfo/{num}"
                        yield Request(url=url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                                      meta={"race_id": race_id, "user_id": user_id,
                                            "race_name": race_name, "keyword": keyword})

    def parse(self, response, **kwargs):
        """Extract the race code and request the paged score search for *keyword*."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        keyword = response.meta.get("keyword", "")
        try:
            self.logger.info(f"开始解racecode信息:{response.url}")
            content = json.loads(response.text).get("data", {})
            code = content.get("RaceCode", "")
            url = f"http://{self.host}/api/OffRoad/GetSearchValuePaged?PageNumber=1&PageSize=20&searchValue={keyword}"
            # The race code must be sent as a header on all subsequent calls.
            yield Request(url=url, callback=self.parse_base, dont_filter=True,
                          errback=self.err_parse, headers={"X-Race-Code": code},
                          meta={"race_id": race_id, "code": code, "user_id": user_id,
                                "race_name": race_name, "keyword": keyword})
        except Exception:
            self.logger.info(f"解析racecode信息时出错{response.url}：{traceback.format_exc()}")

    def parse_base(self, response, **kwargs):
        """Collect base score fields for every matched athlete and request split times."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        code = response.meta.get("code", "")
        keyword = response.meta.get("keyword", "")
        try:
            self.logger.info(f"开始查找{keyword}基本成绩信息:{response.url}")
            data_list = json.loads(response.text).get("data", {}).get("items", [])
            for data in data_list:
                # onlyid is later used to fetch the certificate image.
                pic_id = data.get("onlyid", "")
                score_data = {"race_name": race_name, "itemName": data.get("igroup", ""),
                              "name": data.get("chinesename", ""), "raceNo": data.get("bib", ""), "pace": data.get("pace", ""),
                              "distance": f"{data.get('distance', 0)}KM", "shotScore": data.get("gross", ""), "score": data.get("net", "")}
                score_id = data.get("id")
                url = f"http://{self.host}/api/OffRoad/GetPalyerScoreInfo/{score_id}"
                yield Request(url=url, callback=self.parse_score, dont_filter=True,
                              errback=self.err_parse, headers={"X-Race-Code": code},
                              meta={"race_id": race_id, "score_data": score_data, "user_id": user_id,
                                    "race_name": race_name, "keyword": keyword, "pic_id": pic_id})
        except Exception:
            self.logger.info(f"查找{keyword}基本成绩信息时出错{response.url}：{traceback.format_exc()}")

    def parse_score(self, response, **kwargs):
        """Parse checkpoint split times, then fetch the certificate or yield the result."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        keyword = response.meta.get("keyword", "")
        pic_id = response.meta.get("pic_id", "")
        score_data = response.meta.get("score_data", {})
        try:
            self.logger.info(f"开始解析{keyword}的成绩:{response.url}")
            score_info = []
            data_list = json.loads(response.text).get("data", [])
            for data in data_list:
                score_info.append({"cp_name": data.get("spname", ""),
                                   "start_time": data.get("sptime", ""),
                                   "score": data.get("splap", ""),
                                   "distance": data.get("spdist", ""),
                                   # per-checkpoint pace is not provided by this endpoint
                                   "pace": ""})
            if score_info:
                # The last checkpoint is the finish line.
                score_info[-1]["cp_name"] = "FINISH"
            score_data["sectionScore"] = score_info
            score_data.update({"fastPace": "", "slowPace": "", "certImg": ""})
            if pic_id:
                url = f"http://{self.host}/api/OffRoad/GetCertByOnlyId/{pic_id}"
                yield Request(url=url, callback=self.parse_cert, errback=self.err_parse, dont_filter=True,
                              meta={"race_id": race_id, "user_id": user_id, "score_data": score_data, "keyword": keyword})
            else:
                # No certificate id: deliver the score data without certImg.
                yield self._make_result(response, str(score_data))
        except Exception:
            self.logger.info(f"查找{keyword}的成绩时出错{response.url}：{traceback.format_exc()}")

    def parse_cert(self, response, **kwargs):
        """Resolve the certificate image URL; otherwise yield the score data as-is."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        keyword = response.meta.get("keyword", "")
        score_data = response.meta.get("score_data", {})
        try:
            self.logger.info(f"开始解析{keyword}的证书链接:{response.url}")
            body = json.loads(response.text)  # parse once instead of twice
            cert_url = body.get("data", {}).get("Img2Url", "") if body else ""
            if cert_url:
                yield Request(url=cert_url, callback=self.parse_pic, errback=self.err_parse, dont_filter=True,
                              meta={"race_id": race_id, "keyword": keyword, "user_id": user_id,
                                    "score_data": score_data})
            else:
                # No certificate available: still deliver the score data.
                yield self._make_result(response, str(score_data))
        except Exception:
            self.logger.info(f"解析{keyword}的证书链接时出错{response.url}：{traceback.format_exc()}")

    def parse_pic(self, response):
        """Save the certificate image locally, upload it, and yield the final result."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        keyword = response.meta.get("keyword", "")
        score_data = response.meta.get("score_data", {})
        try:
            self.logger.info(f"开始下载{keyword}的证书：{response.url}")
            pic_md5 = calc_str_md5(response.url)
            pic_name = f"{keyword}_{pic_md5}.jpg"
            dir_path = os.path.join(file_path, "picture", self.name_first)
            os.makedirs(dir_path, exist_ok=True)  # idempotent, no exists() race
            save_path = os.path.join(dir_path, pic_name)
            upload_path = f"flow/{race_id}/{user_id}/pic/{pic_name}"
            if not os.path.exists(save_path):
                with open(save_path, "wb") as f:
                    f.write(response.body)
                upload_flag = upload_file(save_path, upload_path)
                if upload_flag:
                    self.logger.info(f"{save_path}上传成功：{upload_path}")
                else:
                    self.logger.info(f"{save_path}上传失败：{upload_path}")
            # BUGFIX: always yield a result. Previously a pre-existing local file
            # produced no item at all; upload_path is deterministic, so it was
            # already uploaded on the first pass and remains valid here.
            score_data["certImg"] = upload_path
            yield self._make_result(response, str(score_data))
        except Exception:
            self.logger.info(f"下载{keyword}证书时出错{response.url}：{traceback.format_exc()}")

    def parse_hdian(self, response, **kwargs):
        """Handle the dedicated 2025 Hengdian marathon certificate endpoint."""
        race_id = response.meta.get("race_id", "")
        user_id = response.meta.get("user_id", "")
        race_name = response.meta.get("race_name", "")
        user_name = response.meta.get("user_name", "")
        race_no = response.meta.get("race_no", "")
        try:
            self.logger.info(f"开始解析{user_name}的证书链接")
            url = json.loads(response.text).get("Img2Url", "")
            if url:
                # Only the certificate image is available for this race;
                # the remaining score fields stay empty.
                score_data = {"race_name": race_name, "itemName": "",
                              "name": user_name, "raceNo": race_no,
                              "pace": "", "distance": "", "shotScore": "", "score": ""}
                yield Request(url=url, callback=self.parse_pic, errback=self.err_parse, dont_filter=True,
                              meta={"race_id": race_id, "keyword": race_no, "user_id": user_id,
                                    "score_data": score_data})
            else:
                yield self._make_result(response, str({"msg": "未查到相关成绩信息"}))
        except Exception:
            self.logger.info(f"解析racecode信息时出错{response.url}：{traceback.format_exc()}")

    def err_parse(self, failure):
        """Log a failed request.

        BUGFIX: an errback runs outside any except block, so
        traceback.format_exc() would log the useless 'NoneType: None';
        failure.value carries the actual exception.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{repr(failure.value)}")

