# -*- coding:utf8 -*-
import json
import traceback
import os
import typing
from lxml import etree
from scrapy import Request, FormRequest
from .utmb_chrome import UtmbChrome
from squirrel_core.frame.spider_makaka import MakakaSpider

# Base path for file output, overridable via the FILE_PATH environment variable.
# NOTE(review): unused in this visible chunk — presumably consumed elsewhere in
# the project (e.g. by the framework or a sibling module); confirm before removal.
file_path = os.environ.get("FILE_PATH", "/")


class utmb_score(MakakaSpider):
    """Spider that looks up a runner's UTMB race result page and extracts
    the overall score plus per-checkpoint section timings.

    NOTE: the lowercase class name is kept as-is — it mirrors the registered
    spider ``name`` and may be referenced by that identifier elsewhere.
    """

    name = "utmb_score"
    # Task serial number; populated from the incoming task payload and echoed
    # back in every upload_procedure / result payload.
    serialNumber = ""

    def get_ext_requests_or_urls(self, data=None):
        """Validate the task payload and yield the initial page request.

        ``data`` may arrive either as a JSON string or as a mapping. The
        payload must provide at least one of ``user_name`` / ``race_no``
        inside ``spider_config``; otherwise the task is rejected (code 203)
        and the spider is flagged for shutdown.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if data and isinstance(data, typing.MutableMapping):
            self.serialNumber = data.get("serialNumber", "")
            # Hoist the nested config once instead of re-fetching it per key.
            spider_config = data.get("spider_config", {})
            race_id = spider_config.get("race_id", "")
            user_id = spider_config.get("user_id", "")
            race_no = spider_config.get("race_no", "")
            user_name = spider_config.get("user_name", "")
            home_url = spider_config.get("url", "")
            race_name = spider_config.get("race_name", "").replace(" ", "")
            if not user_name and not race_no:
                self.logger.info("user_name和race_no必须存在一个")
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 203, "message": "user_name和race_no必须存在一个"})
                self.close_after_idle = True
                self.force_to_close_spider = True
            else:
                self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
                # race_no takes precedence as the lookup keyword when both exist.
                keyword = race_no or user_name
                yield Request(url=home_url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                              meta={"race_id": race_id, "user_id": user_id, "race_name": race_name, "keyword": keyword,
                                    "user_name": user_name, "race_no": race_no})

    def parse(self, response, **kwargs):
        """Render the runner detail page via headless Chrome and parse scores.

        Yields one result item whose ``data`` field is the stringified score
        dict: overall race info plus a ``sectionScore`` list with one entry
        per checkpoint row. On any parsing failure, logs the traceback and
        yields nothing.
        """
        race_name = response.meta.get("race_name", "")
        keyword = response.meta.get("keyword", "")
        user_name = response.meta.get("user_name", "")
        race_no = response.meta.get("race_no", "")
        proxy_url = response.request.meta.get("proxy", "")
        proxy_pre = response.request.meta.get("proxy_pre", "")
        try:
            self.logger.info(f"查找{user_name}的成绩")
            # Build the runner-detail URL regardless of a trailing slash.
            if response.url.endswith("/"):
                url = f"{response.url}runners/{keyword}"
            else:
                url = f"{response.url}/runners/{keyword}"
            content = UtmbChrome().get_score(url, proxy_url, proxy_pre)
            if content:
                tree = etree.HTML(content)
                # Overall figures; class names are build-hashed and brittle —
                # update these xpaths if the site redeploys its frontend.
                race_group = "".join(tree.xpath("//div[@class='index-category_category_icon__Om5_5']//img/@alt"))
                length = "".join(tree.xpath("//p[contains(text(), 'Distance')]/following-sibling::*//text()")).split("/")[0].strip()
                speed = "".join(tree.xpath("//p[contains(text(), 'speed')]/following-sibling::*//text()")).strip()
                score = "".join(tree.xpath("//p[contains(text(), 'Race Time')]/following-sibling::*//text()")).strip()
                score_data = {"race_name": race_name, "itemName": race_group, "name": user_name, "raceNo": race_no,
                              "speed": speed, "distance": length, "shotScore": "", "score": score}
                # Per-checkpoint rows from the live timing table.
                score_info = []
                div_list = tree.xpath("//div[@class='live-table_container__UHVYK']/div[@class='runner-timing-point_liveTableRow__6mmGm live-table_row__LB5WQ']")
                for div in div_list:
                    cp_name = "".join(div.xpath("./div[3]//text()"))
                    start_time = "".join(div.xpath("./div[4]//text()"))
                    stop_time = "".join(div.xpath("./div[5]//text()"))
                    cp_speed = "".join(div.xpath("./div[6]//text()"))
                    cp_length = "".join(div.xpath("./div[7]//text()"))
                    score_info.append(
                        {"cp_name": cp_name, "start_time": start_time, "score": stop_time, "distance": cp_length, "pace": cp_speed})
                if score_info:
                    # The last timing row is always the finish line.
                    score_info[-1]["cp_name"] = "FINISH"
                score_data["sectionScore"] = score_info
            else:
                score_data = {"msg": "未查到成绩"}
            result = self.result_item_assembler(response)
            result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                     "crawlerType": self.name_second, "data": str(score_data)}
            yield result
        except Exception:
            # Error path: log at error level so failures are not buried in info logs.
            self.logger.error(f"查找{user_name}的成绩时出错{response.url}：{traceback.format_exc()}")

    def err_parse(self, failure):
        """Scrapy errback for failed requests.

        ``traceback.format_exc()`` is useless here — there is no active
        exception inside an errback, so it would log "NoneType: None".
        Log the twisted Failure itself, which carries the real error.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{repr(failure)}")

