#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/3/26 15:09
# @Author  : 王凯
# @File    : siku_tools.py
# @Project : scrapy_spider
import hashlib
import json
import random
import threading
import time

import requests
from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad
from faker import Faker

from utils.db.redisdb import RedisDB
from utils.geetest_utils.geetest3 import Geetest3
from utils.logs import logger
from utils.proxies_tools import get_company_ip_crawler_by_api


class SiKuTools:
    """Token-pool helper for jzsc.mohurd.gov.cn.

    Solves the geetest captcha (via Geetest3), decrypts the site's
    AES-encrypted API responses, and maintains a pool of accessToken
    records in a Redis sorted set for spiders to draw from.
    """

    # AES-128-CBC key/IV the site uses to encrypt API response bodies.
    _AES_KEY = bytes("jo8j9wGw%6HbxfFn", encoding="utf-8")
    _AES_IV = bytes("0123456789ABCDEF", encoding="utf-8")

    def __init__(self, timeout=10):
        """
        :param timeout: per-request timeout in seconds for all HTTP calls.
        """
        self.redis_db = RedisDB()
        # Sorted set of JSON token records; the score counts how many
        # times each record has been handed out (see zincrby below).
        self.redis_key = "scrapy:siku:accessToken"
        self.timeout = timeout

    @staticmethod
    def decrypt(data):
        """Decrypt a hex-encoded AES-128-CBC API response body.

        :param data: hex ciphertext, or a raw "403 Forbidden" HTML page
            returned when the site blocks the request.
        :return: decrypted UTF-8 JSON string; an empty ``'{"data":{}}'``
            payload when the response is a 403 page.
        """
        if "403 Forbidden" in data:
            return '{"data":{}}'
        cipher = AES.new(
            key=SiKuTools._AES_KEY,
            mode=AES.MODE_CBC,
            iv=SiKuTools._AES_IV,
        )
        decrypted_data = cipher.decrypt(bytes.fromhex(data))
        return unpad(decrypted_data, block_size=AES.block_size).decode("utf8")

    def get_captcha_validate_token(self, session=None):
        """Solve the geetest captcha and obtain an accessToken.

        :param session: optional ``requests.Session``; a fresh one is
            created when omitted.  (The previous ``session=requests.Session()``
            default was evaluated once at import time, so every caller
            relying on the default silently shared one Session — cookies,
            headers, proxies and connection pool included.)
        :return: dict with ``proxies`` / ``access_token`` / ``User-Agent``
            describing the validated session.
        :raises AssertionError: when the verify endpoint returns a
            non-2000 code.
        """
        if session is None:
            session = requests.Session()
        # Replace the tell-tale "python-requests" default UA with a browser one.
        if "python" in (session.headers.get("User-Agent") or ''):
            session.headers = {
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            }
        url = "https://jzsc.mohurd.gov.cn/APi/webApi/geetest/startCaptcha"
        response = session.get(url, timeout=self.timeout)
        resp = json.loads(self.decrypt(response.text))["data"]
        verify_result = Geetest3(session=session).run(resp["gt"], resp["challenge"])
        params = {
            "geetest_challenge": verify_result["challenge"],
            "geetest_validate": verify_result["validate"],
            "geetest_seccode": verify_result["validate"] + "|jordan",
            "randomId": resp["randomId"],
        }
        response = session.get(
            "https://jzsc.mohurd.gov.cn/APi/webApi/geetest/verifyLoginCode",
            params=params,
            timeout=self.timeout,
        )
        result = json.loads(self.decrypt(response.text))
        # Explicit raise instead of `assert` so the check survives `python -O`
        # while keeping the exception type existing callers already handle.
        if result["code"] != 2000:
            raise AssertionError(result)
        return {
            "proxies": session.proxies,
            "access_token": result["data"]["accessToken"],
            "User-Agent": session.headers['User-Agent'],
        }

    def get_access_token_from_cache(self):
        """Pick a random cached token record and bump its usage score.

        Tops the pool up first, then samples one of up to 30 records.

        :return: token record dict, or ``None`` if the pool stays empty
            after three attempts.
        """
        for _ in range(3):
            self.add_access_token_to_cache()
            datas = self.redis_db.zget(self.redis_key, count=30, is_pop=False)
            if datas:
                data = random.choice(datas)
                # Count hand-outs so heavily-used records sort last.
                self.redis_db.zincrby(self.redis_key, 1, data)
                return json.loads(data)
        return None

    def _fetch_and_cache_token(self):
        """Create one fresh token record through a rotating proxy and cache it.

        Shared by :meth:`add_access_token_to_cache` and :meth:`run_forever`,
        which previously duplicated this code.
        """
        session = requests.Session()
        session.proxies = get_company_ip_crawler_by_api(static=False)
        session.headers = {"User-Agent": Faker().user_agent()}
        cache_data = self.get_captcha_validate_token(session)
        self.redis_db.zadd(self.redis_key, json.dumps(cache_data, ensure_ascii=False), 0)

    def add_access_token_to_cache(self):
        """Block until the pool holds at least 5 records; refresh its TTL."""
        self.redis_db.set_expire(self.redis_key, 60 * 60 * 5)
        while self.redis_db.zget_count(self.redis_key) < 5:
            try:
                self._fetch_and_cache_token()
                time.sleep(5)
            except Exception as e:
                # logger.exception keeps the traceback that logger.error dropped.
                logger.exception(e)

    def get_ip_headers(self):
        """Return a proxy/UA record with no access token attached."""
        return {
            "proxies": get_company_ip_crawler_by_api(static=False),
            "access_token": '',
            "User-Agent": Faker().user_agent(),
        }

    def remove(self, data, reason=None):
        """Drop a bad token record from the pool.

        :param data: the token record dict previously handed out.
        :param reason: optional human-readable reason for the log.
        """
        logger.error(fr"remove {data} reason: {reason}")
        self.redis_db.zrem(self.redis_key, json.dumps(data, ensure_ascii=False))

    def run_forever(self):
        """Daemon loop that keeps the pool topped up to at least 10 records.

        Intended as a long-running thread target; never returns.
        """
        logger.info("start run_forever geetest pool")
        while True:
            if self.redis_db.zget_count(self.redis_key) < 10:
                try:
                    self._fetch_and_cache_token()
                except Exception as e:
                    logger.exception(e)
            else:
                time.sleep(5)


if __name__ == "__main__":
    # Run five parallel pool-filler threads that keep the Redis token
    # pool topped up indefinitely.
    tools = SiKuTools()
    for _worker in range(5):
        threading.Thread(target=tools.run_forever).start()
