#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/1/30 15:14
# @Author  : 王凯
# @File    : proxies_tools.py
# @Project : scrapy_spider
import random
import time

import requests

from project_setting import SQUID_URL, PINZHAN_IP_REDIS_URL
from utils.db.redisdb import RedisDB
from utils.logs import logger
from utils.redis_lock import RedisLock

# Module-level singleton connection; created lazily by get_redisdb().
redisdb = None


def get_redisdb():
    """Return the shared RedisDB client, creating it lazily on first use.

    :return: the module-wide RedisDB instance bound to PINZHAN_IP_REDIS_URL.
    """
    global redisdb
    # `is None` (not truthiness) so a valid-but-falsy client is never
    # re-created and the singleton guarantee holds.
    if redisdb is None:
        redisdb = RedisDB(url=PINZHAN_IP_REDIS_URL, max_connections=1)
    return redisdb


def get_company_ip_crawler_by_api(static=False, ip_pz=False, pool_size=10):
    """Fetch a proxy mapping ({"http": ..., "https": ...}) for crawlers.

    :param static: when True, ask the squid service for a static crawler IP
        (appends ``&crawler=true`` to the request).
    :param ip_pz: when True, serve an IP from the pinzhan redis pool instead
        of the squid service.
    :param pool_size: desired pinzhan pool size (only used when ip_pz=True).
    :return: a proxies dict usable by ``requests``; ``{}`` on any failure
        (callers rely on the empty-dict fallback, so errors are logged,
        not raised).
    """
    try:
        if ip_pz:
            return get_pinzhan_ip_crawler(static=static, pool_size=pool_size)
        # timeout so a dead squid service cannot hang the worker forever
        response = requests.get(
            url=f'{SQUID_URL}get_ip?token=6a154b2e-f8e6-11eb-9276-d89ef30b7cde{"&crawler=true" if static else ""}',
            timeout=10,
        ).json()
        proxy_url = f'http://squid:70226ff9ff818edbd816e8c06b76ef97@{response["data"]}'
        return {"http": proxy_url, "https": proxy_url}
    except Exception as e:
        logger.error(f"IP错误 : {e}")
    return {}


def get_pinzhan_ip_crawler(pool_size=10, static=False):
    """Return a proxy from the pinzhan redis pool, falling back to squid.

    Expired entries are purged first, the pool is topped up, and a random
    live entry is returned. If the pool is still empty the squid service is
    queried instead.

    :param pool_size: target number of entries kept in the "ip_pz" pool.
    :param static: passed through to the squid fallback request
        (appends ``&crawler=true``).
    :return: a proxies dict ({"http": ..., "https": ...}).
    """
    now_time = int(time.time() * 1000)
    # Scores are expiry timestamps (ms); drop everything already expired.
    get_redisdb().zremrangebyscore("ip_pz", "-inf", now_time)
    add_pinzhan_ip(pool_size=pool_size)  # top the pool back up
    ip_list = get_redisdb().zget("ip_pz", is_pop=False, count=100)
    if ip_list:
        ip_str = random.choice(ip_list)
        return {
            "http": ip_str,
            "https": ip_str,
        }
    # Pool empty (e.g. refill lock held elsewhere): fall back to squid.
    # timeout so an unreachable squid service cannot hang the worker.
    response = requests.get(
        url=f'{SQUID_URL}get_ip?token=6a154b2e-f8e6-11eb-9276-d89ef30b7cde{"&crawler=true" if static else ""}',
        timeout=10,
    ).json()
    proxy_url = f'http://squid:70226ff9ff818edbd816e8c06b76ef97@{response["data"]}'
    return {"http": proxy_url, "https": proxy_url}


def add_pinzhan_ip(pool_size=10):
    """Top up the "ip_pz" redis pool with one fresh pinzhan proxy.

    A distributed lock with wait_timeout=0 ensures only one worker refills
    at a time; others return immediately instead of blocking.

    :param pool_size: refill only while the pool holds fewer entries than this.
    :return: the proxies dict that was added, or None if nothing was added.
    """
    with RedisLock(key="add_pinzhan_ip", lock_timeout=3600, wait_timeout=0) as _lock:
        # Guard clauses: another worker holds the lock, or pool is full.
        if not _lock.locked:
            return None
        if get_redisdb().zget_count("ip_pz") >= pool_size:
            return None
        url = (
            "https://service.ipzan.com/core-extract?num=1&no=20240205735843168044"
            "&minute=5&format=json&pool=quality&mode=auth&secret=f61gnajtj0fvcp"
        )
        for _ in range(3):  # up to 3 attempts against the ipzan API
            try:
                ip_data = requests.get(url, timeout=10).json()
                # Explicit check instead of `assert`: asserts are stripped
                # under `python -O`, which would silently skip validation.
                if ip_data.get("code") != 0:
                    raise ValueError(f"unexpected ipzan response: {ip_data}")
                ip_one = ip_data.get("data").get("list", [])
                ip = ip_one[0].get("ip")
                port = ip_one[0].get("port")
                account = ip_one[0].get("account")
                password = ip_one[0].get("password")
                _proxies_key = f"http://{account}:{password}@{ip}:{port}"
                _proxies = {"http": _proxies_key, "https": _proxies_key}
                try:
                    now_time = int(time.time() * 1000)
                    # Score = expiry time; recorded 10s early so clients
                    # never receive an almost-expired IP.
                    get_redisdb().zadd(
                        "ip_pz", _proxies_key, prioritys=now_time + (5 * 60 - 10) * 1000
                    )
                    get_redisdb().zremrangebyscore("ip_pz", "-inf", now_time)
                except Exception as e:
                    # Caching failure is non-fatal: still return the proxy.
                    logger.error(f"IP缓存错误 : {e}")
                return _proxies
            except Exception as e:
                logger.error(f"IP错误 : {e}")
        return None

if __name__ == "__main__":
    print(get_company_ip_crawler_by_api(ip_pz=True))
