#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os.path
import sys
import random
from instock.lib.singleton_type import singleton_type

# At runtime, temporarily add the project root to sys.path — presumably so
# project-absolute imports resolve when this file is executed directly
# (TODO confirm: the instock.lib import above already ran before this append).
cpath_current = os.path.dirname(os.path.dirname(__file__))  # package root (two levels above this file)
cpath = os.path.abspath(os.path.join(cpath_current, os.pardir))  # project root (one level above package root)
sys.path.append(cpath)
# Proxy list file: <package root>/config/proxy.txt, one proxy address per line.
proxy_filename = os.path.join(cpath_current, 'config', 'proxy.txt')

__author__ = 'myh '
__date__ = '2025/1/6 '


# Proxy pool reader.
class proxys(metaclass=singleton_type):
    """Singleton that loads proxy addresses from config/proxy.txt and hands
    out randomly chosen, requests-compatible proxy settings.

    If the file is missing or unreadable, the pool is empty and
    get_proxies() returns None (direct connection).
    """

    def __init__(self):
        # Initialize to an empty list so get_data()/get_proxies() never
        # raise AttributeError, even when loading fails below.
        self.data = []
        try:
            # Explicit encoding so parsing does not depend on the platform
            # default; iterate the file directly instead of readlines().
            with open(proxy_filename, "r", encoding="utf-8") as file:
                # Keep non-empty, non-comment (#-prefixed) lines, de-duplicated.
                lines = [line.strip() for line in file
                         if line.strip() and not line.strip().startswith('#')]
                self.data = list(set(lines))
        except OSError:
            # File missing or unreadable: empty pool -> direct-connection mode.
            # (Narrowed from `except Exception` so genuine bugs still surface.)
            self.data = []

    def get_data(self):
        """Return the raw list of configured proxy addresses (may be empty)."""
        return self.data

    def get_proxies(self):
        """Return a proxies dict for requests, or None for direct connection."""
        # Empty pool -> None; requests then connects without a proxy.
        if not self.data:
            return None

        proxy = random.choice(self.data)
        # Normalize bare IP:PORT entries to a full http:// URL.
        if not proxy.startswith(('http://', 'https://')):
            proxy = f"http://{proxy}"

        return {"http": proxy, "https": proxy}

    def get_headers(self):
        """
        Return request headers with keep-alive disabled.
        Note: tunnel proxies do not support keep-alive; connection reuse must
        be disabled so the proxy can rotate the exit IP on every request.
        """
        return {
            'Connection': 'close',  # disable keep-alive: fresh connection per request
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Accept': '*/*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
        }

"""
    def get_proxies(self):
        if self.data is None:
            return None

        while len(self.data) > 0:
            proxy = random.choice(self.data)
            if https_validator(proxy):
                return {"http": proxy, "https": proxy}
            self.data.remove(proxy)

        return None


from requests import head
def https_validator(proxy):
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
               'Accept': '*/*',
               'Connection': 'keep-alive',
               'Accept-Language': 'zh-CN,zh;q=0.8'}
    proxies = {"http": f"{proxy}", "https": f"{proxy}"}
    try:
        r = head("https://data.eastmoney.com", headers=headers, proxies=proxies, timeout=3, verify=False)
        return True if r.status_code == 200 else False
    except Exception as e:
        return False
"""