# -*- coding:utf8 -*-
import json
import copy
import traceback
import os
from scrapy import Request, FormRequest
from squirrel_core.commons.utils.tools import calc_str_md5
from squirrel_core.frame.spider_makaka import MakakaSpider
import base64
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
# Base path for any file output; falls back to "/" when FILE_PATH is not set in the environment.
file_path = os.environ.get("FILE_PATH", "/")


class itra_runnername(MakakaSpider):
    """Spider that looks up runners on itra.run by name and emits one item
    containing the full, de-paginated result list.

    Flow:
      1. GET the FindARunner page to harvest the CSRF token and session cookies.
      2. POST to ``/api/runner/find``; the JSON response carries three base64
         fields (ciphertext / IV / key) that decrypt to the actual result JSON.
      3. Follow pagination (50 results per page) until all pages are collected,
         then assemble a single result item.
    """
    name = "itra_runnername"
    find_url = "https://itra.run/api/runner/find"
    header = {
        "Host": "itra.run",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:138.0) Gecko/20100101 Firefox/138.0",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
        "Accept-Encoding": "gzip, deflate, br, zstd",
    }
    # Template POST body for the find API; "start" is the 1-based offset, "count" the page size.
    find_data = {"name": "", "start": "1", "count": "50"}
    specific_settings = {
        'COOKIES_ENABLED': True,
        'CONCURRENT_REQUESTS': 100,
        'HTTPERROR_ALLOWED_CODES': [302]
    }
    handle_httpstatus_list = [302]
    need_ssdbstore_dup = True
    # Task serial number from the incoming job payload.
    # NOTE(review): stored as spider-level state — assumes one task per spider run; confirm.
    serialNumber = ""

    def get_ext_requests_or_urls(self, data=None):
        """Entry point: parse the task payload and schedule the first request.

        ``data`` may arrive as a JSON string or a dict (or ``None``); both are
        normalized to a dict before use.
        """
        if data and isinstance(data, str):
            data = json.loads(data)
        data = data or {}  # tolerate a missing payload instead of raising AttributeError
        self.serialNumber = data.get("serialNumber", "")
        self.upload_procedure({"serialNumber": self.serialNumber, "code": 100, "message": "任务启动成功"})
        runner_name = data.get("spider_config", {}).get("runner_name", "")
        url = f"https://itra.run/Runners/FindARunner?name={runner_name}"
        yield Request(url=url, callback=self.parse, dont_filter=True, errback=self.err_parse,
                      headers=self.header, meta={"runner_name": runner_name})

    def parse(self, response, **kwargs):
        """Extract the CSRF token and session cookies from the search page,
        then POST the first find-API request."""
        runner_name = response.meta.get("runner_name", "")
        # May be None if the page layout changes; the API call would then fail downstream.
        token = response.xpath("//input[@name='__RequestVerificationToken']/@value").extract_first()
        # Fold every Set-Cookie value (name=value only) into a single Cookie header.
        cookie = "; ".join([i.decode().split(";")[0] for i in response.headers.getlist("Set-Cookie")])
        self.header["Cookie"] = cookie
        self.header["X-CSRF-TOKEN"] = token
        # deepcopy so the class-level template is never mutated across runs.
        find_data = copy.deepcopy(self.find_data)
        find_data["name"] = runner_name
        yield FormRequest(url=self.find_url, headers=self.header, formdata=find_data,
                          meta={"find_data": find_data, 'dont_merge_cookies': True},
                          errback=self.err_parse, callback=self.parse_runner, dont_filter=True)

    def parse_runner(self, response):
        """Decrypt one page of results, accumulate it, and either request the
        next page or emit the assembled item when pagination is exhausted."""
        start_num = response.meta.get("start_num", 1)
        self.logger.info(f"开始解析列表,从第{start_num}开始，serialNumber:{self.serialNumber}")
        start_list = response.meta.get("start_list", [])
        runner_result_list = response.meta.get("runner_result_list", [])
        # page_flag is True only on the first page, where pagination is computed.
        page_flag = response.meta.get("page_flag", True)
        find_data = response.meta.get("find_data", {})
        content = json.loads(response.text)
        response1 = content.get("response1", "")
        response2 = content.get("response2", "")
        response3 = content.get("response3", "")
        result = self.decrypt_string(response1, response2, response3)
        result_dict = json.loads(result)
        total = result_dict.get("ResultCount", 0)
        if page_flag:
            if total > 50:
                # Remaining page offsets: 51, 101, ... (first page already fetched).
                start_list = list(range(1, total, 50))[1:]
            else:
                start_list = []
        runner_list = result_dict.get("Results", [])
        runner_result_list.extend(runner_list)
        if start_list:
            s = start_list.pop(0)
            find_data["start"] = str(s)
            yield FormRequest(url=self.find_url, headers=self.header, formdata=find_data,
                              meta={"find_data": find_data, "page_flag": False, "runner_result_list": runner_result_list,
                                    "start_list": start_list, "start_num": s}, dont_filter=True,
                              errback=self.err_parse, callback=self.parse_runner)
        else:
            result = self.result_item_assembler(response)
            result['result_data'] = {"serialNumber": self.serialNumber, "webType": self.name_first, "crawlerType": self.name_second, "data": str(runner_result_list)}
            result["_dup_str"] = calc_str_md5(str(runner_result_list))
            yield result

    def decrypt_string(self, response1_base64, response2_base64, response3_base64):
        """AES-CBC decrypt the API payload and strip PKCS7 padding.

        Args:
            response1_base64: base64 ciphertext.
            response2_base64: base64 IV (16 bytes).
            response3_base64: base64 AES key.

        Returns:
            The decrypted plaintext decoded as UTF-8.
        """
        response1 = base64.b64decode(response1_base64)
        response2 = base64.b64decode(response2_base64)
        response3 = base64.b64decode(response3_base64)

        cipher = Cipher(
            algorithms.AES(response3),
            modes.CBC(response2),
            backend=default_backend()
        )
        decryptor = cipher.decryptor()

        decrypted = decryptor.update(response1) + decryptor.finalize()

        # PKCS7: the last byte gives the pad length, which is always 1..16 for
        # AES. The previous check allowed 0, and decrypted[:-0] == b"" would
        # have silently discarded the whole plaintext.
        padding_length = decrypted[-1]
        if 1 <= padding_length <= 16:
            decrypted = decrypted[:-padding_length]

        return decrypted.decode('utf-8')

    def err_parse(self, failure):
        """Log a failed request.

        ``traceback.format_exc()`` is useless inside an errback — no exception
        is being handled on this stack, so it always rendered "NoneType: None".
        The Twisted Failure itself carries the real error.
        """
        self.logger.warning(f"请求失败：{failure.request.url},错误原因:{repr(failure.value)}")
