# -*- coding:utf8 -*-
import json
import traceback
import os
from scrapy import Request, FormRequest
from squirrel_core.commons.utils.tools import calc_str_md5
from squirrel_core.item.procedure_item import ProcedureItem
from squirrel_core.frame.spider_makaka import MakakaSpider
import base64
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
# Base output path, overridable via the FILE_PATH env var (defaults to "/").
# NOTE(review): not referenced anywhere in this chunk — confirm it is used
# elsewhere (e.g. by the MakakaSpider base class) before removing.
file_path = os.environ.get("FILE_PATH", "/")


class ticket_japan(MakakaSpider):
    """Spider for ticket-sharing.com.

    Flow: fetch the login page -> submit the login form with the CSRF token
    and session cookie -> load the runner information page -> parse profile /
    stats / ranking sections -> page through the AES-CBC-encrypted race-result
    endpoint, accumulating results into ``base_info``.
    """

    name = "ticket_japan"
    login_url = "https://ticket-sharing.com/login"
    check_url = "https://ticket-sharing.com/login_check"
    specific_settings = {
        'COOKIES_ENABLED': True,
        'HTTPERROR_ALLOWED_CODES': [302]
    }
    # 302 responses are handled manually (redirect target re-requested with
    # the login header) instead of by Scrapy's redirect middleware.
    handle_httpstatus_list = [302]

    need_ssdbstore_dup = True
    # Task-supplied serial number, populated in get_ext_requests_or_urls.
    serialNumber = ""

    # NOTE(review): self.login_header and self.result_url are used by
    # parse_runner_detail / parse_runner_result but are never assigned in this
    # class — confirm MakakaSpider provides them, otherwise those callbacks
    # raise AttributeError at runtime.

    def get_ext_requests_or_urls(self, data=None):
        """Entry point: read the task payload and request the login page.

        ``data`` may be a JSON string or a dict; ``serialNumber`` is stored on
        the spider for later requests.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        self.serialNumber = data.get("serialNumber", "")
        yield Request(url=self.login_url, callback=self.parse, dont_filter=True, errback=self.err_parse)

    def parse(self, response, **kwargs):
        """Extract the CSRF token and session cookie, then POST the login form."""
        # Join the name=value part of every Set-Cookie header into one Cookie line.
        cookie = "; ".join([i.decode().split(";")[0] for i in response.headers.getlist("Set-Cookie")])
        token = response.xpath("//input[@name='_csrf_token']/@value").extract_first()
        # SECURITY: credentials are hard-coded in source — move them to a
        # secrets store / spider settings.
        data = {
            "_csrf_token": token,
            "_target_path": "",
            "_username": "anniewu1210@gmail.com",
            "_password": "Jun960610",
            "remember_me": "1",
            "_submit": ""
        }
        header = {
            "Host": "ticket-sharing.com",
            "Content-Type": "application/x-www-form-urlencoded",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
            "Cookie": cookie
        }
        # Header is passed through meta so parse_login can reuse it.
        yield FormRequest(self.check_url, formdata=data, callback=self.parse_login, errback=self.err_parse, headers=header, dont_filter=True, meta={"header": header})

    def parse_login(self, response):
        """After login, request the runner information page with a Referer set."""
        header = response.meta.get("header", {})
        header["Referer"] = "https://ticket-sharing.com/"
        url = "https://ticket-sharing.com/information"
        yield Request(url=url, callback=self.parse_runner_detail, dont_filter=True, headers=header, errback=self.err_parse)

    def parse_runner_detail(self, response):
        """Parse the runner profile page (or follow its 302 redirect).

        Builds ``base_info`` from the profile, popover index, team block,
        stats, points and ranking sections, then requests page 1 of the
        race-result endpoint.
        """
        runner_id = response.meta.get("runner_id", "")
        if response.status == 302:
            # Follow the redirect manually so the login header is preserved.
            location_link = response.headers["Location"].decode("utf-8")
            location_url = response.urljoin(location_link.strip())
            yield Request(url=location_url, headers=self.login_header, meta={"runner_id": runner_id},
                          errback=self.err_parse, callback=self.parse_runner_detail, dont_filter=True)
        else:
            runner_name = "".join(response.xpath("//div[@id='ProfilePic']//h4/text()").extract()).strip()
            # Titles and values of the profile summary; pair them positionally.
            sub_title_list = response.xpath("//div[@id='divShowResults']/p/text()").extract()
            sub_value_list = response.xpath("//div[@id='divShowResults']/p/span//text()").extract()
            sub_title_list = [s.strip() for s in sub_title_list]
            sub_title_list = list(filter(lambda x: x != "", sub_title_list))
            sub_title_list = [s.strip(":").replace(" ", "_") for s in sub_title_list]
            sub_value_list = [s.strip() for s in sub_value_list]
            sub_value_list = list(filter(lambda x: x != "", sub_value_list))
            base_info = dict(zip(sub_title_list, sub_value_list))
            base_info["runner_name"] = runner_name

            general_list = response.xpath("//div[@class='popover-inner']//text()").extract()
            general_list = list(filter(lambda x: x.strip() != "", general_list))
            general = "|".join(general_list)
            team_list = response.xpath("//div[@class='info-block d-flex align-items-center']//text()").extract()
            team_list = list(filter(lambda x: x.strip() != "", team_list))
            team_list = [t.strip() for t in team_list]
            if team_list:
                # Alternating label/value pairs.
                team_dict = dict(zip(team_list[::2], team_list[1::2]))
                base_info.update(team_dict)
            div_list = response.xpath("//div[@class='row mt_16']/div")
            stats_dict = {}
            for div in div_list:
                title = "".join(div.xpath(".//h5/text()").extract()).strip()
                value = "".join(div.xpath(".//h4/text()").extract()).strip()
                stats_dict[title] = value

            div_list = response.xpath("//div[@class='col my-2 my-lg-0']")
            point_list = []
            for div in div_list:
                title = "".join(div.xpath(".//p/text()").extract()).strip()
                value = "".join(div.xpath(".//h4/text()").extract()).strip()
                point_list.append(f"{title}{value}")

            div_list = response.xpath("//div[@class='col-md-12 my-2 my-lg-0 col-lg-4']")
            rank_dict = {}
            for div in div_list:
                sub_dict = {}
                title = "".join(div.xpath(".//h4//text()").extract()).strip()
                # "me/total" for the overall ranking column...
                # (renamed from `all`, which shadowed the builtin)
                me_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']//h5/text()").extract()).strip()
                total_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']//h5/../p/text()").extract()).strip()
                sub_dict[title] = {"Overall": f"{me_rank}/{total_rank}"}
                # ...and for the adjacent age-group column.
                me_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']/following-sibling::*[1]//h5/text()").extract()).strip()
                total_rank = "".join(div.xpath(".//div[@class='col-6 border-end py_16 px_24']/following-sibling::*[1]//h5/../p/text()").extract()).strip()
                sub_dict[title].update({"Age Group": f"{me_rank}/{total_rank}"})
                rank_dict.update(sub_dict)
            base_info.update({"Index": general, "Stats": stats_dict, "Points": point_list, "Ranking": rank_dict})
            result_url = self.result_url.format(runner_id, 1)
            yield Request(url=result_url, headers=self.login_header, meta={"base_info": base_info, "runner_id": runner_id},
                          errback=self.err_parse, callback=self.parse_runner_result, dont_filter=True)

    def parse_runner_result(self, response):
        """Decrypt one page of race results and paginate until an empty page."""
        runner_id = response.meta.get("runner_id", "")
        base_info = response.meta.get("base_info", {})
        page_no = response.meta.get("page_no", 1)
        race_result_list = response.meta.get("race_result_list", [])
        content = json.loads(response.text)
        # response1 = ciphertext, response2 = IV, response3 = key (base64).
        response1 = content.get("response1", "")
        response2 = content.get("response2", "")
        response3 = content.get("response3", "")
        result = self.decrypt_string(response1, response2, response3)
        result_dict = json.loads(result)
        result_list = result_dict.get("RaceResults", [])
        if result_list:
            race_result_list.extend(result_list)
            page_no += 1
            result_url = self.result_url.format(runner_id, page_no)
            yield Request(url=result_url, headers=self.login_header,
                          meta={"base_info": base_info, "race_result_list": race_result_list,
                                "page_no": page_no, "runner_id": runner_id},
                          errback=self.err_parse, callback=self.parse_runner_result, dont_filter=True)
        else:
            base_info.update({"race_result": race_result_list})
            # NOTE(review): nothing is yielded here and the imported
            # ProcedureItem is never used — this terminal branch looks
            # incomplete (the assembled base_info is dropped). Confirm whether
            # an item/store step is missing.

    def decrypt_string(self, response1_base64, response2_base64, response3_base64):
        """Decrypt a base64 AES-128/256-CBC payload and strip PKCS#7 padding.

        :param response1_base64: base64-encoded ciphertext
        :param response2_base64: base64-encoded IV
        :param response3_base64: base64-encoded key
        :return: decrypted UTF-8 string
        """
        ciphertext = base64.b64decode(response1_base64)
        iv = base64.b64decode(response2_base64)
        key = base64.b64decode(response3_base64)

        cipher = Cipher(
            algorithms.AES(key),
            modes.CBC(iv),
            backend=default_backend()
        )
        decryptor = cipher.decryptor()

        decrypted = decryptor.update(ciphertext) + decryptor.finalize()

        # PKCS#7 pad length must be 1..16. The previous check (`<= 16`) also
        # accepted 0, and `decrypted[:-0]` evaluates to b"" — silently
        # discarding the entire plaintext when the last byte is 0x00.
        padding_length = decrypted[-1]
        if 1 <= padding_length <= 16:
            decrypted = decrypted[:-padding_length]

        return decrypted.decode('utf-8')

    def err_parse(self, failure):
        """Log failed requests with the failure's own traceback.

        Uses ``failure.getTraceback()``: ``traceback.format_exc()`` would
        always log "NoneType: None" here because no exception is being
        handled in this frame — errbacks receive a Twisted Failure instead.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{failure.getTraceback()}")
