# -*- coding:utf8 -*-
import json
import traceback
import os
import time
import pyssdb
from scrapy import Request, FormRequest
from squirrel_core.commons.utils.tools import calc_str_md5
from squirrel_core.frame.spider_makaka import MakakaSpider
import base64
from squirrel_core.commons.utils.get_config import get_config
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
file_path = os.environ.get("FILE_PATH", "/")


class itra_user(MakakaSpider):
    """Spider for itra.run: fetches a runner's profile page and their
    paginated (AES-encrypted) race results, using a login cookie popped
    from SSDB.
    """

    name = "itra_user"
    start_url = "https://itra.run/"
    find_url = "https://itra.run/api/runner/find"
    login_url = "https://itra.run/Account/Login"
    # runnerId and pageNumber are filled in via str.format(runner_id, page_no)
    result_url = "https://itra.run/api/Race/GetRaceResultsData?runnerId={}&pageNumber={}&pageSize=10&raceYear=&categoryId="
    header = {
        "Host": "itra.run",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Accept-Encoding": "gzip, deflate, br, zstd",
    }

    login_header = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "accept-encoding": "gzip, deflate, br",
        "accept-language": "zh,zh-CN;q=0.9",
        "Connection": "keep-alive",
        "cache-control": "max-age=0",
        "content-type": "application/x-www-form-urlencoded",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
        "Host": "itra.run"
    }
    find_data = {"name": "", "start": "1", "count": "10"}
    # 302 must reach the spider: session-expiry and redirects are handled manually.
    specific_settings = {
        'COOKIES_ENABLED': False,
        'HTTPERROR_ALLOWED_CODES': [302]
    }
    handle_httpstatus_list = [302]

    base_config = get_config(sections="base")
    ssdb_host = base_config.get('ssdb_host')
    ssdb_port = base_config.get('ssdb_port')
    need_ssdbstore_dup = True
    serialNumber = ""

    # Pop a login cookie from SSDB at class-creation time, retrying up to 3
    # times.  `cookie` is always bound (None on total failure) so that later
    # `self.cookie` checks cannot raise AttributeError.  The connection is
    # closed once, AFTER the retry loop — disconnecting inside the loop
    # (as the original did in a per-iteration `finally`) killed the client
    # before any retry could run.
    cookie = None
    c = pyssdb.Client(host=ssdb_host, port=ssdb_port)
    i = 0
    try:
        while i < 3:
            try:
                cookie = c.qfront("itra_cookie")
                if cookie:
                    break
                i += 1
            except Exception:
                i += 1
                time.sleep(1)
    finally:
        c.disconnect()

    def get_ext_requests_or_urls(self, data=None):
        """Entry point: parse the task payload and start the crawl.

        ``data`` may be a JSON string or an already-decoded dict with a
        ``serialNumber`` and a ``spider_config`` holding email / password /
        runner_id.  A ``None`` or malformed payload no longer crashes with
        AttributeError — it degrades to empty defaults.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        if not isinstance(data, dict):
            # Original raised AttributeError on None; fall back to defaults.
            data = {}
        self.serialNumber = data.get("serialNumber", "")
        self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
        spider_config = data.get("spider_config", {}) or {}
        email = spider_config.get("email", "")
        pwd = spider_config.get("password", "")
        runner_id = spider_config.get("runner_id", "")
        yield Request(url=self.start_url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                      headers=self.header, meta={"email": email, "pwd": pwd, "runner_id": runner_id})

    def parse(self, response, **kwargs):
        """Attach the SSDB-stored cookie and request the runner's profile page.

        ``self.cookie`` was popped from SSDB at class-creation time; without
        it the site's profile page cannot be fetched, so we only log and stop.
        (The old interactive login flow was dead commented-out code and has
        been removed.)
        """
        self.logger.info("开始获取cookie")
        runner_id = response.meta.get("runner_id", "")
        if self.cookie:
            # NOTE(review): pyssdb returns bytes; Scrapy accepts bytes header
            # values, so no decode is done here — confirm downstream is OK.
            self.login_header["Cookie"] = self.cookie
            runner_url = f"https://itra.run/RunnerSpace/{runner_id}"
            yield Request(url=runner_url, headers=self.login_header,
                          meta={"runner_id": runner_id},
                          errback=self.err_parse, callback=self.parse_runner_detail)
        else:
            self.logger.info("cookie为空")
    #
    # def parse_login(self, response):
    #     runner_id = response.meta.get("runner_id", "")
    #     cookie = "; ".join([i.decode().split(";")[0] for i in response.headers.getlist("Set-Cookie")])
    #     self.login_header["Cookie"] = cookie
    #
    #     runner_url = f"https://itra.run/RunnerSpace/{runner_id}"
    #     yield Request(url=runner_url, headers=self.login_header,
    #                   meta={"runner_id": runner_id},
    #                   errback=self.err_parse, callback=self.parse_runner_detail)

    def parse_runner_detail(self, response):
        """Parse a runner's profile page (``RunnerSpace/<id>``).

        Handles 302 responses manually (session expiry vs. plain redirect,
        since cookies are disabled), scrapes the profile's base info, stats,
        points and ranking sections, then chains into the paginated race
        result API via :meth:`parse_runner_result`.
        """
        runner_id = response.meta.get("runner_id", "")
        try:
            self.logger.info(f"开始请求{runner_id}的详情")
            if response.status == 302:
                if "SessionExpired" in response.url:
                    # Stored cookie no longer valid: report back instead of looping.
                    self.logger.info(f"请求{runner_id}时会话过期，需重新登陆")
                    result_dict = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                   "crawlerType": self.name_second, "data": "会话过期，请重新登陆"}
                    result = self.result_item_assembler(response)
                    result['result_data'] = result_dict
                    yield result
                else:
                    # Ordinary redirect: follow Location ourselves (COOKIES_ENABLED is off).
                    location_link = response.headers["Location"].decode("utf-8")
                    location_url = response.urljoin(location_link.strip())
                    yield Request(url=location_url, headers=self.login_header, meta={"runner_id": runner_id},
                                  errback=self.err_parse, callback=self.parse_runner_detail, dont_filter=True)
            else:
                # --- base info: "<label>: <value>" pairs under #divShowResults ---
                runner_name = "".join(response.xpath("//div[@id='ProfilePic']//h4/text()").extract()).strip()
                sub_title_list = response.xpath("//div[@id='divShowResults']/p/text()").extract()
                sub_value_list = response.xpath("//div[@id='divShowResults']/p/span//text()").extract()
                sub_title_list = [s.strip() for s in sub_title_list]
                sub_title_list = list(filter(lambda x: x != "", sub_title_list))
                sub_title_list = [s.strip(":").replace(" ", "_") for s in sub_title_list]
                sub_value_list = [s.strip() for s in sub_value_list]
                sub_value_list = list(filter(lambda x: x != "", sub_value_list))
                base_info = dict(zip(sub_title_list, sub_value_list))
                base_info["runner_name"] = runner_name

                # --- ITRA performance index popover text, joined with "|" ---
                general_list = response.xpath("//div[@class='popover-inner']//text()").extract()
                general_list = list(filter(lambda x: x.strip() != "", general_list))
                general = "|".join(general_list)
                # --- team block: alternating label/value text nodes ---
                team_list = response.xpath("//div[@class='info-block d-flex align-items-center']//text()").extract()
                team_list = list(filter(lambda x: x.strip() != "", team_list))
                team_list = [t.strip() for t in team_list]
                if team_list:
                    team_dict = dict(zip(team_list[::2], team_list[1::2]))
                    base_info.update(team_dict)
                # --- stats tiles: h5 = label, h4 = value ---
                div_list = response.xpath("//div[@class='row mt_16']/div")
                stats_dict = {}
                for div in div_list:
                    title = "".join(div.xpath(".//h5/text()").extract()).strip()
                    value = "".join(div.xpath(".//h4/text()").extract()).strip()
                    stats_dict[title] = value

                # --- points columns: "<label><value>" strings ---
                div_list = response.xpath("//div[@class='col my-2 my-lg-0']")
                point_list = []
                for div in div_list:
                    title = "".join(div.xpath(".//p/text()").extract()).strip()
                    value = "".join(div.xpath(".//h4/text()").extract()).strip()
                    point_list.append(f"{title}{value}")

                # --- ranking cards: Overall and Age Group as "me/total" ---
                div_list = response.xpath("//div[@class='col-md-12 my-2 my-lg-0 col-lg-4']")
                rank_dict = {}
                for div in div_list:
                    sub_dict = {}
                    title = "".join(div.xpath(".//h4//text()").extract()).strip()
                    # renamed from `me`/`all` — `all` shadowed the builtin
                    me_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']//h5/text()").extract()).strip()
                    group_total = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']//h5/../p/text()").extract()).strip()
                    sub_dict[title] = {"Overall": f"{me_rank}/{group_total}"}
                    me_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']/following-sibling::*[1]//h5/text()").extract()).strip()
                    group_total = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']/following-sibling::*[1]//h5/../p/text()").extract()).strip()
                    sub_dict[title].update({"Age Group": f"{me_rank}/{group_total}"})
                    rank_dict.update(sub_dict)
                base_info.update({"Index": general, "Stats": stats_dict, "Points": point_list, "Ranking": rank_dict})
                # Chain into the (encrypted) race-result API, page 1.
                result_url = self.result_url.format(runner_id, 1)
                yield Request(url=result_url, headers=self.login_header, meta={"base_info": base_info, "runner_id": runner_id},
                              errback=self.err_parse, callback=self.parse_runner_result, dont_filter=True)
        except Exception:
            self.logger.info(f"获取{runner_id}详情时出错：{traceback.format_exc()}")

    def parse_runner_result(self, response):
        """Accumulate one page of decrypted race results.

        Pages are requested one after another; when an empty page comes back,
        all gathered results are attached to ``base_info`` and the final item
        is emitted with a dedup hash.
        """
        runner_id = response.meta.get("runner_id", "")
        try:
            self.logger.info(f"获取{runner_id}的所有比赛结果")
            meta = response.meta
            base_info = meta.get("base_info", {})
            page_no = meta.get("page_no", 1)
            race_result_list = meta.get("race_result_list", [])

            # The API wraps an AES-CBC payload in three base64 fields.
            payload = json.loads(response.text)
            plaintext = self.decrypt_string(
                payload.get("response1", ""),
                payload.get("response2", ""),
                payload.get("response3", ""),
            )
            page_results = json.loads(plaintext).get("RaceResults", [])

            if not page_results:
                # Last page reached: assemble and emit the final item.
                base_info.update({"race_result": race_result_list})
                item = self.result_item_assembler(response)
                item['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first,
                                       "crawlerType": self.name_second, "data": str(base_info)}
                item["_dup_str"] = calc_str_md5(str(base_info))
                yield item
            else:
                race_result_list.extend(page_results)
                next_page = page_no + 1
                yield Request(url=self.result_url.format(runner_id, next_page),
                              headers=self.login_header,
                              meta={"base_info": base_info, "race_result_list": race_result_list,
                                    "page_no": next_page, "runner_id": runner_id},
                              errback=self.err_parse, callback=self.parse_runner_result, dont_filter=True)
        except Exception:
            self.logger.info(f"获取{runner_id}比赛结果时出错：{traceback.format_exc()}")

    def decrypt_string(self, response1_base64, response2_base64, response3_base64):
        """Decrypt the race-result API payload.

        AES-CBC where ``response1`` is the ciphertext, ``response2`` the IV
        and ``response3`` the key — all base64-encoded.  Returns the UTF-8
        plaintext with PKCS#7 padding stripped.
        """
        ciphertext = base64.b64decode(response1_base64)
        iv = base64.b64decode(response2_base64)
        key = base64.b64decode(response3_base64)

        cipher = Cipher(
            algorithms.AES(key),
            modes.CBC(iv),
            backend=default_backend()
        )
        decryptor = cipher.decryptor()

        decrypted = decryptor.update(ciphertext) + decryptor.finalize()

        # Strip PKCS#7 padding.  Valid pad lengths for AES's 16-byte blocks
        # are 1..16; the original accepted 0, and `decrypted[:-0]` would have
        # wrongly returned b"".  Also guard against an empty plaintext.
        if decrypted:
            padding_length = decrypted[-1]
            if 1 <= padding_length <= 16:
                decrypted = decrypted[:-padding_length]

        return decrypted.decode('utf-8')

    def err_parse(self, response):
        """Errback for failed requests: log the failure and emit an error item.

        NOTE(review): Scrapy invokes errbacks with a twisted ``Failure``, not
        a ``Response``.  Inside an errback no exception is being handled, so
        the original ``traceback.format_exc()`` logged "NoneType: None"; the
        real error lives in ``failure.value`` and the URL in
        ``failure.request.url``.  Both are read defensively via getattr so
        this also tolerates being called with a Response-like object.
        """
        err = getattr(response, "value", None)
        reason = repr(err) if err is not None else traceback.format_exc()
        url = getattr(getattr(response, "request", None), "url", "")
        self.logger.warning(f"请求失败：{url},错误原因:{reason}")
        result_dict = {"serialNumber": self.serialNumber, "webType": self.name_first,
                       "crawlerType": self.name_second, "data": str({"msg": "数据获取失败，请重试"})}
        # NOTE(review): result_item_assembler is handed the Failure here, as
        # in the original — confirm MakakaSpider tolerates that.
        result = self.result_item_assembler(response)
        result['result_data'] = result_dict
        yield result
