#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/7/25 17:33
# @Author  : 王凯
# @File    : jsl_middlewares.py
# @Project : scrapy_spider
import hashlib
import json
import re

import execjs
from scrapy import Request
from scrapy.downloadermiddlewares.retry import RetryMiddleware


class JslMiddleware(RetryMiddleware):
    """Downloader middleware that solves the jsl ("jsluid" anti-bot)
    cookie challenge: when a response carries the challenge page, it
    computes the ``__jsl_clearance_s`` cookie and retries the original
    request with it (bounded by the standard RetryMiddleware settings).
    """

    def process_request(self, request, spider):
        # Called for each request that goes through the downloader
        # middleware.

        # Must either:
        # - return None: continue processing this request
        # - or return a Response object
        # - or return a Request object
        # - or raise IgnoreRequest: process_exception() methods of
        #   installed downloader middleware will be called
        return None

    def gen_jsl_new_cookie(self, response, base_cookie=None, proxy=None, headers=None):
        """
        Compute the jsl clearance cookie from a challenge response.

        :param response: challenge response; a requests-like object with
            ``.content`` (bytes) or a scrapy Response with ``.text``
        :param base_cookie: existing cookies to merge the clearance into
            (may be None)
        :param proxy: unused here; kept for interface compatibility
        :param headers: unused here; kept for interface compatibility
        :return: dict of cookies including ``__jsl_clearance_s``, or None
            when the response contains no recognizable challenge
        """
        # Guard: the original code did {**None, ...} when no base cookie
        # was supplied, which raises TypeError.
        base_cookie = base_cookie or {}
        # requests-style responses expose .content (bytes); scrapy uses .text
        resp = response.content.decode() if hasattr(response, 'content') else response.text

        # Stage-1 challenge: a <script> assigning document.cookie via a JS
        # expression we can evaluate directly. Dots escaped so the pattern
        # matches only the literal "document.cookie".
        pattern = re.compile(r"<script>document\.cookie=(.*?);location", re.S)
        js = pattern.findall(resp)
        if js:
            # The expression evaluates to "__jsl_clearance_s=<value>;...".
            # maxsplit=1 keeps values that themselves contain '='.
            __jsl_clearance_s = execjs.eval(js[0]).split(";")[0].split("=", 1)[1]
            return {**base_cookie, "__jsl_clearance_s": __jsl_clearance_s}

        # Stage-2 challenge: a go({...}) JSON payload describing a small
        # hash brute-force (see _get_jsl_clearance_s).
        data_str = re.findall(r";go\((.*?)\)</script>", resp)
        if data_str:
            data = json.loads(data_str[0])
            __jsl_clearance_s = self._get_jsl_clearance_s(
                data["ct"], data["bts"], data["chars"], data["ha"]
            )
            return {**base_cookie, "__jsl_clearance_s": __jsl_clearance_s}
        return None

    def process_response(self, request, response, spider):
        # Called with the response returned from the downloader.
        # Must either:
        # - return a Response object
        # - return a Request object
        # - or raise IgnoreRequest
        request_cookies = {}
        raw_cookie = request.headers.get('Cookie')
        if raw_cookie:
            # split('=', 1): cookie values may legitimately contain '='
            # (the original unbounded split made dict() raise ValueError
            # on such pairs).
            request_cookies = dict(
                pair.split('=', 1)
                for pair in raw_cookie.decode().split("; ")
                if '=' in pair
            )

        new_cookies = self.gen_jsl_new_cookie(
            response, request_cookies, request.meta.get("proxy"), request.headers
        )
        if new_cookies:
            new_request: Request = request.copy()
            new_request.meta['jsl'] = True
            new_request.cookies.update(new_cookies)
            new_request.headers.update(
                {"Cookie": '; '.join(f"{k}={v}" for k, v in new_cookies.items())}
            )
            # _retry honors RETRY_TIMES; once retries are exhausted it
            # returns None and we fall back to the challenge response.
            return self._retry(new_request, "jsl_cookie", spider) or response
        return response

    @staticmethod
    def _get_jsl_clearance_s(ct, bts, chars, hash_name):
        """Brute-force the jsl clearance cookie value.

        The challenge supplies a target digest ``ct``, two boundary strings
        ``bts``, a candidate alphabet ``chars`` and a hashlib algorithm name
        ``hash_name`` (e.g. ``'md5'``, ``'sha1'``, ``'sha256'``). The
        clearance value is ``bts[0] + c1 + c2 + bts[1]`` for the pair of
        alphabet characters whose digest equals ``ct``.

        :return: the matching clearance string, or None when no pair matches
        """
        # Resolve the hash constructor once; it is loop-invariant.
        hash_func = getattr(hashlib, hash_name)
        for c1 in chars:
            for c2 in chars:
                candidate = f"{bts[0]}{c1}{c2}{bts[1]}"
                if hash_func(candidate.encode()).hexdigest() == ct:
                    return candidate
        return None

    def process_exception(self, request, exception, spider):
        # Called when a download handler or a process_request()
        # (from other downloader middleware) raises an exception.

        # Must either:
        # - return None: continue processing this exception
        # - return a Response object: stops process_exception() chain
        # - return a Request object: stops process_exception() chain
        pass
