# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
import base64
import ctypes
import datetime
import random
import re
import time

import requests
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from faker import Faker
from retrying import retry
from scrapy import signals, Request
# useful for handling different item types with a single interface
from scrapy.exceptions import IgnoreRequest

from components.middlewares.downloadmiddlewares.private.rs_6_middlewares import Rs6RetryMiddleware
from components.middlewares.downloadmiddlewares.public.company_ip_by_api import CompanyIpByApiMiddleware
from components.middlewares.downloadmiddlewares.public.jsl_middlewares import JslMiddleware
from utils.geetest_utils.geetest4 import Geetest4
from utils.logs import log
from utils.proxies_tools import get_company_ip_crawler_by_api
from utils.tools import unquote_url, parse_url_params
from utils.user_pool import GuestUserPool, GuestUser
from utils.user_pool.base_user_pool import NormalUser


class GsxtSpiderMiddleware:
    """Default pass-through spider middleware.

    Every hook forwards its input unchanged; the only real behavior is
    logging when the spider opens.
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Build the middleware and subscribe to the spider_opened signal."""
        middleware = cls()
        crawler.signals.connect(middleware.spider_opened, signal=signals.spider_opened)
        return middleware

    def process_spider_input(self, response, spider):
        """Accept every response headed into the spider (no filtering)."""
        return None

    def process_spider_output(self, response, result, spider):
        """Forward each item/request produced by the spider as-is."""
        yield from result

    def process_spider_exception(self, response, exception, spider):
        """Let spider exceptions propagate to the next middleware."""
        return None

    def process_start_requests(self, start_requests, spider):
        """Forward the spider's start requests unchanged."""
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info("Spider opened: %s" % spider.name)


class GsxtDownloaderMiddleware(Rs6RetryMiddleware, CompanyIpByApiMiddleware, JslMiddleware, GuestUserPool):
    """Downloader middleware for the gsxt.gov.cn crawler.

    Combines several anti-bot layers via multiple inheritance:
      * Rs6RetryMiddleware       -- regenerates "rs" cookies on 202/412 responses.
      * CompanyIpByApiMiddleware -- rotating company-proxy support.
      * JslMiddleware            -- regenerates "jsl" cookies on 521 responses.
      * GuestUserPool            -- Redis-backed pool of (cookies, UA, proxy) identities.

    Each outgoing request is stamped with an identity from the pool; blocked
    responses drop that identity and retry, and successful responses write the
    refreshed cookies back to the pool.
    """

    # Class attribute: shared by all instances. Used only for out-of-band HTTP
    # (token fetch, login, cookie activation), never for Scrapy downloads.
    session = requests.Session()

    def __init__(self, settings):
        Rs6RetryMiddleware.__init__(self, settings)
        CompanyIpByApiMiddleware.__init__(self, settings)
        self.base_url = None   # filled in per-spider in process_request
        self.shi_ming = False  # True once the spider targets the real-name subdomain
        self.use_rs_api = settings.get("USE_RS_API", True)
        # NOTE(security): credentials hard-coded in source control; move them
        # into settings / a secrets store.
        self.accounts = [
            {"username": "13920273623", "password": "wfq123456@"},
            {"username": "15558153087", "password": "wfq12345678@"},
            {"username": "15700095131", "password": "wfq12345@"},
            {"username": "18571817275", "password": "wfq1234567@"},
        ]
        GuestUserPool.__init__(self, redis_key="gsxt", min_users=1, redis_kwargs={"url": settings.get("REDIS_URL")}, keep_alive=False)
        # Start from a clean pool so stale identities are never reused.
        self._redisdb.clear(self._tab_user_pool)

    def login(self):
        """Produce one usable identity.

        Returns a real-account login on the shiming subdomain, otherwise an
        anonymous guest session with a random proxy flavor.
        """
        account = random.choice(self.accounts)
        if self.shi_ming:
            user = self._login_with_user(account['username'], account['password'])
        else:
            user = self._login_normal(proxy_type={"ip_pz": random.choice([True, False])})
        return user

    @retry(stop_max_attempt_number=5)
    def _request(self, **kwargs):
        """Issue one request on the shared session, refreshing anti-bot cookies.

        202/412 triggers an "rs" cookie regeneration, 521 a "jsl" one; in both
        cases an exception is raised so @retry re-issues the request with the
        fresh cookie. Returns the final Response, or None after too many
        caller-tracked retries.
        """
        allowed_methods_kwargs = [
            "params", "data", "headers", "cookies", "files", "auth", "timeout", "allow_redirects", "proxies", "hooks",
            "stream", "verify", "cert", "json"
        ]
        if (kwargs.get("retry_times") or 0) >= 10:
            print("retry times is too many")
            return None
        request_kwargs = {k: v for k, v in kwargs.items() if k in allowed_methods_kwargs}
        response = self.session.request(kwargs.get('method'), kwargs.get('url'), **request_kwargs)
        if response.status_code in [202, 412]:
            rs_cookie = self.gen_new_cookie(response, base_cookie=self.session.cookies.get_dict(), proxy=self.session.proxies, headers=self.session.headers)
            self.session.cookies.update(rs_cookie)
            raise Exception("rs_cookie")
        if response.status_code in [521]:
            jsl_cookie = self.gen_jsl_new_cookie(response, base_cookie=self.session.cookies.get_dict())
            self.session.cookies.update(jsl_cookie)
            raise Exception("jsl_cookie")
        return response

    def get_token(self, base_cookie=None, proxy=None, headers=None):
        """Fetch and decode the site's geetest form token.

        The site serves JS as a list of character codes; the token is derived
        by XOR-ing two values scraped from two successive responses.
        """
        if headers is None:
            headers = {
                "User-Agent": Faker().user_agent(),
            }
        if base_cookie is None:
            base_cookie = {}
        if isinstance(proxy, dict):
            proxies = proxy
        else:
            proxies = {
                "http": proxy,
                "https": proxy,
            }
        self.session.cookies.clear_session_cookies()
        self.session.cookies.update(base_cookie)
        self.session.proxies.update(proxies)
        self.session.headers.update(headers)
        # cache-buster derived from the current clock
        entry_point = str(datetime.datetime.now().minute + datetime.datetime.now().second)
        geetest_image_url = f"{self.base_url}/corp-query-custom-geetest-image.gif?v=" + entry_point
        resp = self._request(url=geetest_image_url, method="GET").content.decode()
        # NOTE(security): eval() on remote content — the body is expected to be a
        # plain list literal of char codes; consider ast.literal_eval instead.
        js_code = "".join([chr(i) for i in eval(resp)])
        location_info = re.findall(r"location_info =(.\d+)", js_code)[0]
        token_mix = re.findall(r"\^\s*(.\d+)", js_code)[0]
        input_url = f"{self.base_url}/corp-query-geetest-validate-input.html?token=" + location_info
        resp = self._request(url=input_url, method="GET").content.decode()
        next_value = re.findall(r"value\s*:\s*(\d+)", "".join([chr(i) for i in eval(resp)]))[0]
        # c_int reproduces the site's 32-bit signed JS arithmetic
        token = ctypes.c_int(int(next_value) ^ int(token_mix)).value
        return token

    def process_request(self, request, spider):
        """Stamp each outgoing request with an identity from the user pool.

        Attaches proxy, User-Agent and cookies from a pooled user, and solves
        the geetest challenge when request.meta['geetest'] is set.
        Returns None so Scrapy continues processing the request.
        """
        self.base_url = getattr(spider, "base_url", 'https://www.gsxt.gov.cn')
        if self.base_url == "https://shiming.gsxt.gov.cn":
            self.shi_ming = True
        self.run()
        user = self.get_user()
        if user:
            new_proxy = user.proxies.get("http")
            request.meta["user"] = user.to_dict()
            request.meta["user_id"] = user.user_id
            request.meta["proxy"] = new_proxy
            request.meta["http_proxy"] = new_proxy
            request.meta["proxies_info"] = new_proxy  # raw proxy information
            request.headers.update({"User-Agent": user.user_agent})
            user_cookies = {k: v for k, v in user.cookies.items() if v}
            request.headers.update({"Cookie": '; '.join([f"{k}={v}" for k, v in user_cookies.items() if v])})
            request.cookies.update(user_cookies)

        if 'geetest' in request.meta and request.meta['geetest']:
            self.process_geetest(request, spider)
        return None

    @retry(stop_max_attempt_number=5)
    def process_geetest(self, request, spider):
        """Solve the geetest captcha and splice the answer into the request body.

        Re-activates the session cookies first, fetches the form token, then
        verifies through Geetest4 and rewrites the POST body in place.
        Raises on verification failure so @retry re-attempts.
        """
        root_url, params = parse_url_params(unquote_url(request.body.decode()))
        if 'captchaId' in params:
            proxies = request.meta.get("proxies_info") or {}
            if proxies and not isinstance(proxies, dict):
                proxies = {"https": proxies, "http": proxies}

            request_cookies = {}
            if request.headers.get('Cookie'):
                # split('=', 1): cookie values may themselves contain '='
                request_cookies = dict([i.split('=', 1) for i in request.headers.get('Cookie').decode().split("; ") if '=' in i])
            cookie_dict = {**request_cookies, **getattr(request, "cookies", {})}

            # Warm-up request to activate the cookies server-side.
            self._request(url=f"{self.base_url}/corp-query-homepage.html", method="GET", proxies=proxies, headers=request.headers.to_unicode_dict(), timeout=5)
            active_cookies = {**cookie_dict, **{k: v for k, v in self.session.cookies.items() if v}}
            request.headers.update({"Cookie": '; '.join([f"{k}={v}" for k, v in active_cookies.items() if v])})
            request.cookies.update(active_cookies)

            params.update({"token": self.get_token(base_cookie=active_cookies, proxy=proxies, headers=request.headers.to_unicode_dict())})
            validate_data = Geetest4(captcha_id=params['captchaId'], proxies=proxies, page_url=request.url).verify()
            ret = validate_data.get("seccode", {})
            if ret:
                # site expects camelCase "captchaId", geetest returns "captcha_id"
                ret["captchaId"] = ret.pop("captcha_id")
                params.update(ret)
                request._set_body('&'.join(f"{k}={v}" for k, v in params.items()).encode("utf-8"))
            else:
                raise Exception("验证失败")

    def _drop_user_and_retry(self, request, reason, spider):
        """Discard the blocked identity and schedule a retry.

        Marks the request for re-initialization + geetest, removes the pooled
        user it carried, and raises IgnoreRequest once retries are exhausted
        (previously `_retry` could return None, which Scrapy rejects).
        """
        request.meta['init'] = True
        request.meta['geetest'] = True
        if 'user' in request.meta:
            self.del_user(request.meta["user"]['user_id'])
        retry_request = self._retry(request, reason, spider)
        if retry_request is None:
            raise IgnoreRequest("重试最大次数")
        return retry_request

    def process_response(self, request, response, spider):
        """Classify each downloaded response.

        Order: jsl cookie layer, then rs cookie layer (either may substitute a
        retry Request), then ban-page / rate-limit / empty-body detection which
        drops the identity and retries. Healthy responses refresh the pooled
        user's cookies and pass through.
        """
        log.debug(f"{request.url} {response.status} {request.meta} {response.headers}")
        response = JslMiddleware.process_response(self, request, response, spider)
        if isinstance(response, Request):
            response.meta['init'] = False
            response.meta['geetest'] = False
            self._update_user_cookie(response, request, spider)
            return response
        response = Rs6RetryMiddleware.process_response(self, request, response, spider)
        if isinstance(response, Request):
            response.meta['init'] = False
            response.meta['geetest'] = False
            self._update_user_cookie(response, request, spider)
            return response
        # IP-level block / WAF error page
        if any([c in response.text for c in [
            "您的地址（",
            '{"NGIDERRORCODE":',
            "error_403",
        ]]):
            return self._drop_user_and_retry(request, 'ip最近有可疑的攻击行为', spider)
        # server-side rate limiting
        if any([c in response.text for c in [
            "当前系统访问频繁",
        ]]):
            return self._drop_user_and_retry(request, '当前系统访问频繁', spider)
        # account-level suspicion
        if any([c in response.text for c in [
            "您的账号访问疑似夹带攻击行为",
            "您访问频率太高",
            "最近有可疑的攻击行为",
            "有疑似非正常访问行为",
        ]]):
            return self._drop_user_and_retry(request, '账号访问疑似夹带攻击', spider)
        # an empty body is treated as a soft block too
        if not response.text:
            return self._drop_user_and_retry(request, '空的返回体', spider)
        self._update_user_cookie(response, request, spider)
        return response

    def process_exception(self, request, exception, spider):
        """On download errors, drop the identity and delegate to the retry layer."""
        if 'user_id' in request.meta:
            self.del_user(request.meta["user_id"])
            request.meta.pop("user", None)
            request.meta['init'] = True
            request.meta['geetest'] = True
        return Rs6RetryMiddleware.process_exception(self, request, exception, spider)

    def _update_user_cookie(self, response, request, spider):
        """Write refreshed cookies back to the pooled user carried by the request.

        Merges the request's Cookie header, request.cookies and any Set-Cookie
        response headers (response precedence), then re-adds the user to the pool.
        """
        if 'user' in request.meta:
            user = request.meta['user']
            response_cookies = {}
            request_cookies = {}
            if request.headers.get('Cookie'):
                # split('=', 1): keep the full value even when it contains '='
                request_cookies = dict([i.split('=', 1) for i in request.headers.get('Cookie').decode().split("; ") if '=' in i])

            if hasattr(request, "cookies"):
                response_cookies.update(**request.cookies)

            if response.headers.getlist("Set-Cookie"):
                for set_cookie in response.headers.getlist("Set-Cookie"):
                    # keep only the name=value pair, drop cookie attributes
                    set_cookie_str = set_cookie.decode().split("; ")[0]
                    if "=" in set_cookie_str:
                        name, _, value = set_cookie_str.partition("=")
                        response_cookies[name] = value
            new_cookies = {**request_cookies, **response_cookies}
            # gsxtBrowseHistory1 is per-session browsing history; never persist it
            user.update({"cookies": {k: v for k, v in new_cookies.items() if k not in ["gsxtBrowseHistory1"]}})
            self.add_user(GuestUser(**user))

    @staticmethod
    def rsa_encrypt(text: str, public_key=None):
        """RSA-encrypt *text* (PKCS1v15) in 117-byte chunks; return base64 str.

        117 bytes is the maximum plaintext per block for a 1024-bit-style
        PKCS#1 v1.5 chunking scheme used by the site's login form.
        """
        max_length = 117
        public_key = public_key.encode()
        public_key = serialization.load_pem_public_key(public_key, backend=default_backend())
        data = b''
        for i in range(0, len(text), max_length):
            data += public_key.encrypt(
                text[i: i + max_length].encode(),
                padding.PKCS1v15()
            )
        cipher_text = base64.b64encode(data)
        return cipher_text.decode()

    def _login_normal(self, proxy_type=None):
        """Create an anonymous guest identity.

        Verifies the session works by probing a known search endpoint; returns
        a GuestUser on success, or None on failure (errors are logged, not raised).
        """
        if proxy_type is None:
            proxy_type = {}
        self.session.cookies.clear_session_cookies()
        self.session.proxies = get_company_ip_crawler_by_api(**proxy_type)
        user_agent = Faker().chrome()
        self.session.headers = {
            "User-Agent": user_agent,
            "Accept": "*/*",
            "Accept-Language": "zh-CN,zh;q=0.9",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Pragma": "no-cache",
            "Upgrade-Insecure-Requests": "1",
            "Origin": self.base_url,
            "Referer": f"{self.base_url}/corp-query-search-1.html",
        }
        check_url = f"{self.base_url}/corp-query-search-test.html?searchword=91330100799655058B"
        try:
            self._request(method="GET", url=f"{self.base_url}/corp-query-homepage.html", retry_times=0, timeout=10, headers=self.session.headers)
            response = self._request(method="GET", url=check_url, retry_times=0, timeout=10, headers=self.session.headers)
            if response.text == 'true':
                self._request(method="GET", url=f"{self.base_url}/index", retry_times=0, timeout=10, headers=self.session.headers)
                return GuestUser(user_id=int(time.time() * 1000), user_agent=user_agent, proxies=self.session.proxies, cookies=self.session.cookies.get_dict())
            else:
                log.error(f"生产失败 {response} {response.text[-200:]}")
        except Exception as e:
            log.error(f"login error {e}")

    def _login_with_user(self, username, password):
        """Log in with a real account (RSA-encrypted credentials + geetest).

        Returns a NormalUser when the account is logged in AND real-name
        verified (value == "1"); returns None for known failure codes
        ("0" wrong credentials, "2" not real-name verified); raises otherwise.
        """
        self.session.cookies.clear_session_cookies()
        log.info(f"用户 {username}:{password} {self.session.proxies} 尝试登录...")
        public_key = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArqeSZx1T1/54PuJA29Km8eK/z0a5z9wrF4TjC3r6PjsVX9f9HRXy7TX1MTFkPJMI3BU/Yb/48IEn56AftwsAW3cDwPoSu9rvRdxf3tXcwtktbrTGuQHvvm82BAXN+04MQB+gy0MbXgNIIW6BZsDnI1FbV2fEx/ih0mhMG9FvHSB30Z/cUweklGYLjj5kMJ0C7wUNtM5vHXlfHGci079PuSmHrqrszfXZi0KWmahmRgZiViy6Q9lXqYnvTg4zvcWtSqEaxHtZ/DfG83ufxJP0AD6dLHFemTlZ83tMNm4IhFFeDmX5GQ9RVWKYwwDtIoHLtzyrzE4TUKmAA7eUo941zQIDAQAB"
        public_key = "-----BEGIN PUBLIC KEY-----\n" + public_key + "\n-----END PUBLIC KEY-----"
        url = f"{self.base_url}/socialuser-use-login-request.html"
        data = {
            "un": self.rsa_encrypt(username, public_key),
            "gp": self.rsa_encrypt(password, public_key),
        }
        proxies = get_company_ip_crawler_by_api(static=True)
        self.session.proxies = proxies
        headers = {"User-Agent": Faker().user_agent()}
        self.session.headers = headers

        validate_data = Geetest4(
            captcha_id='b608ae7850d2e730b89b02a384d6b9cc',
            proxies=proxies,
            page_url=f'{self.base_url}/corp-query-search-1.html'
        ).verify()

        ret = validate_data.get("seccode", {})
        if ret:
            ret["captchaId"] = ret.pop("captcha_id")
            data.update(ret)

        response = self._request(url=url, data=data, method="POST")
        ret = response.json()
        log.info(f"登录结果「{ret}」{response.cookies.get_dict()} {self.session.cookies.get_dict()}")
        # {'message': '用户名或密码错误！', 'success': True, 'value': '0'}
        # {'message': '', 'success': True, 'value': '1'}
        # FIX: chained with elif — previously value "0" fell through to the
        # final bare `else` and raised even though it is a known failure code.
        if ret.get("success") and ret.get("value") == "1":  # logged in and real-name verified
            user = NormalUser(username=username, password=password)
            user.cookies = self.session.cookies.get_dict()
            user.proxies = self.session.proxies
            user.user_agent = response.request.headers.get("User-Agent")
            return user
        elif ret.get("success") and ret.get("value") == "0":  # wrong username or password
            pass
        elif ret.get("success") and ret.get("value") == "2":  # not real-name verified
            pass
        else:
            raise Exception(f"登录出错了 {ret}")

    def spider_opened(self, spider):
        spider.logger.info("Spider opened: %s" % spider.name)