import json
import requests
from ping3 import ping
from concurrent.futures import ThreadPoolExecutor, as_completed

# Proxy API endpoint and path of the local fallback proxy-list file
api_url = "https://www.proxy-list.download/HTTP"
file_path = "C:\\Users\\xiaoyh\\Downloads\\Proxy List.json"

def get_proxies_from_local_file(file_path):
    """Load the proxy list from a local JSON file.

    :param file_path: path to a JSON file (expected: a list of proxy records)
    :return: the parsed JSON content
    """
    with open(file_path, "r", encoding="utf-8") as fp:
        proxies = json.load(fp)
    print(f"成功提取 {len(proxies)} 个代理 IP")
    return proxies

def get_proxies_from_url(url):
    """Fetch a proxy list from an API that returns one ``ip:port`` per line.

    :param url: API endpoint returning plain-text proxy lines
    :return: list of ``{"IP": ..., "PORT": ...}`` dicts; empty list on any
        failure so the caller can continue gracefully

    Fixes over the original: an HTTP error status is treated as a failure
    (``raise_for_status``), and blank or malformed lines no longer raise
    IndexError from ``split(":")[1]`` — they are skipped instead.
    """
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # 4xx/5xx should not yield a bogus list
        proxies = []
        for line in response.text.splitlines():
            # partition() splits once; skip lines without a host:port shape
            host, sep, port = line.strip().partition(":")
            if sep and host and port:
                proxies.append({"IP": host, "PORT": port})
        print(f"从 API 获取到 {len(proxies)} 个代理 IP")
        return proxies
    except Exception as e:
        print(f"获取代理失败: {e}")
        return []

def fetch_proxies(api_url):
    """Return a proxy list, preferring the local file over the remote API.

    :param api_url: API endpoint used as a fallback when the local file
        cannot be loaded
    :return: list of proxy records
    """
    try:
        proxies = get_proxies_from_local_file(file_path)
    except Exception as err:
        print(f"加载代理列表时出错: {err}")
        proxies = get_proxies_from_url(api_url)
    return proxies

def ping_ip(ip):
    """ICMP-ping *ip* and return the round-trip time in seconds.

    :param ip: host address to ping
    :return: round-trip time in seconds, or None when the host is
        unreachable, unresolvable, or pinging raises

    Fixes over the original: ping3.ping returns ``None`` on timeout but
    ``False`` on errors such as an unknown host — the old ``is None`` check
    let ``False`` through as "reachable".  The log message also claimed
    "ms" while ping3's default unit is seconds; the value is converted for
    display only, the return value stays in seconds.
    """
    try:
        response = ping(ip, timeout=2)  # seconds, or None (timeout) / False (error)
        if response is None or response is False:
            print(f"IP {ip} is not reachable.")
            return None
        # Convert seconds -> ms for the log line only; callers get seconds.
        print(f"IP {ip} is reachable with {response * 1000:.1f} ms ping.")
        return response
    except Exception as e:
        print(f"Ping IP {ip} 时出错: {e}")
        return None

def validate_proxy(proxy):
    """Validate one proxy record: ping its host, then make a real request.

    :param proxy: dict with "IP" and "PORT" keys
    :return: ``{"proxy": url, "ping_time": t}`` when the proxy works,
        otherwise None
    """
    address = f"http://{proxy['IP']}:{proxy['PORT']}"
    proxies_dict = {"http": address, "https": address}

    # Unreachable hosts are rejected before spending a full HTTP request.
    ping_time = ping_ip(proxy['IP'])
    if ping_time is None:
        return None

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}
    try:
        response = requests.get("https://www.gushici.net", proxies=proxies_dict, headers=headers, timeout=10)
    except requests.exceptions.RequestException as e:
        print(f"代理不可用: {e} -> {proxy}")
        return None

    if response.status_code == 200:
        print(f"代理可用: {proxies_dict}")
        return {"proxy": address, "ping_time": ping_time}
    print(f"代理不可用（状态码: {response.status_code}）: {proxy}")
    return None

def filter_best_proxies(valid_proxies, top_n=10):
    """Pick the *top_n* fastest proxies by ping time.

    :param valid_proxies: list of ``{"proxy": url, "ping_time": t}`` dicts
    :param top_n: maximum number of proxy URLs to return
    :return: list of proxy URL strings, fastest first (empty for no input)

    Fix over the original: ``list.sort()`` reordered the caller's list as a
    side effect; ``sorted()`` works on a copy and leaves the input intact.
    """
    if not valid_proxies:
        return []
    # Lower ping time means a better proxy.
    ranked = sorted(valid_proxies, key=lambda entry: entry['ping_time'])
    return [entry['proxy'] for entry in ranked[:top_n]]

def get_proxy_pool():
    """Build the pool of working proxies and keep only the fastest ones.

    :return: list of proxy URL strings, best first
    """
    candidates = fetch_proxies(api_url)
    print("代理 IP 列表:", candidates)

    # Validate candidates concurrently; each future yields a dict or None.
    valid_proxies = []
    with ThreadPoolExecutor(max_workers=15) as pool:
        pending = [pool.submit(validate_proxy, candidate) for candidate in candidates]
        for done in as_completed(pending):
            outcome = done.result()
            if outcome:
                valid_proxies.append(outcome)

    best_proxies = filter_best_proxies(valid_proxies)
    print("筛选出的最优代理池：", best_proxies)
    return best_proxies

# Script entry point: build the pool and display the final usable proxies.
if __name__ == '__main__':
    proxy_pool = get_proxy_pool()
    print("最终可用代理池：", proxy_pool)
