import pandas as pd
import requests
import time
import threading
import queue
import os
import logging
import re
from datetime import datetime
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# Configure logging: mirror every record to both a log file and the console
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler("ip_check.log"),
        logging.StreamHandler()
    ]
)

# Retry policy shared by every session created below: up to 3 retries with
# exponential backoff, but only for transient 5xx server errors
retry_strategy = Retry(
    total=3,
    backoff_factor=0.3,
    status_forcelist=[500, 502, 503, 504],
)


def create_session():
    """Build a new ``requests.Session`` whose HTTP and HTTPS transports
    share the module-level retry policy."""
    adapter = HTTPAdapter(max_retries=retry_strategy)
    session = requests.Session()
    # Route both schemes through the same retrying adapter
    for scheme in ("http://", "https://"):
        session.mount(scheme, adapter)
    return session

def check_ip(ip, session=None):
    """Check whether a proxy IP is usable by sending a HEAD request to Baidu.

    Args:
        ip: Proxy address as a dotted-quad string (e.g. "1.2.3.4").
        session: Optional requests.Session to reuse; a fresh one is created
            when omitted.

    Returns:
        Tuple (is_valid, response_time): is_valid is True only when the final
        response status is 200; response_time is the elapsed time in seconds,
        or None when no response was obtained.
    """
    # Reject missing / NaN / non-string values up front
    if not ip or pd.isna(ip) or not isinstance(ip, str):
        logging.warning(f"❌ 无效的IP格式: {ip}")
        return False, None

    # Validate dotted-quad shape AND octet range. The previous pattern
    # accepted values like 999.999.999.999; `re` is imported at module
    # level, so the former function-local import was redundant.
    ip_pattern = re.compile(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$')
    if not ip_pattern.match(ip) or any(int(part) > 255 for part in ip.split('.')):
        logging.warning(f"❌ IP格式不正确: {ip}")
        return False, None

    if session is None:
        session = create_session()

    try:
        # Route both schemes through the candidate proxy; 5-second timeout
        proxies = {
            "http": f"http://{ip}",
            "https": f"http://{ip}"
        }
        # HEAD request keeps the data transfer minimal
        response = session.head("http://www.baidu.com", proxies=proxies, timeout=5, allow_redirects=True)
        response_time = response.elapsed.total_seconds()
        # Only a final 200 (after redirects) counts as usable
        if response.status_code == 200:
            logging.info(f"✅ IP可用: {ip} (响应时间: {response_time:.2f}秒)")
            return True, response_time
        else:
            logging.warning(f"❌ IP {ip} 返回状态码: {response.status_code}")
            return False, response_time
    except requests.exceptions.ConnectTimeout:
        logging.warning(f"❌ IP {ip} 连接超时")
        return False, None
    except requests.exceptions.ReadTimeout:
        logging.warning(f"❌ IP {ip} 读取超时")
        return False, None
    except requests.exceptions.ProxyError:
        logging.warning(f"❌ IP {ip} 代理错误")
        return False, None
    except Exception as e:
        # Best-effort probe: any other failure simply marks the IP unusable
        logging.error(f"❌ IP {ip} 检查失败: {str(e)}")
        return False, None

def worker(ip_queue, results_list, lock, progress_counter):
    """Worker thread: drain IPs from the queue, check each one, and record
    usable IPs in the shared results list.

    Args:
        ip_queue: queue.Queue of (index, total, ip) tuples.
        results_list: Shared list of result dicts for usable IPs.
        lock: threading.Lock guarding results_list.
        progress_counter: Dict {'value': int, 'lock': Lock} shared counter.
    """
    # Each thread owns its session to avoid concurrent use of one connection pool
    session = create_session()

    while True:
        try:
            index, total, ip = ip_queue.get(block=False)
        except queue.Empty:
            break

        # BUG FIX: the old error path called task_done() only when the queue
        # was non-empty, so an item dequeued from a now-empty queue was never
        # marked done (and Queue.join() accounting could be corrupted). Now
        # every successfully dequeued item is marked done exactly once.
        try:
            is_valid, response_time = check_ip(ip, session)

            # Bump the shared progress counter atomically
            with progress_counter['lock']:
                progress_counter['value'] += 1
                current = progress_counter['value']

            # Report progress every 10 items and on the final item
            with lock:
                current_available = len(results_list)
            if current % 10 == 0 or current == total:
                logging.info(f"进度: {current}/{total}，已发现可用IP: {current_available}")

            if is_valid:
                with lock:
                    results_list.append({
                        'ip': ip,
                        'status': '可用',
                        'response_time': response_time
                    })
                    # Persist immediately so progress survives interruption
                    save_progress(results_list)
                    logging.info(f"✅ 已保存可用IP: {ip}，当前可用IP总数: {len(results_list)}")
            else:
                logging.debug(f"❌ IP {ip} 不可用")

            # Throttle to avoid hammering the target
            time.sleep(0.2)
        except Exception as e:
            logging.error(f"处理IP时出错: {str(e)}")
        finally:
            ip_queue.task_done()

def save_progress(results, temp_file="temp_results.xlsx"):
    """Persist the collected results to a temporary Excel file.

    Does nothing when *results* is empty; write failures are logged but
    never raised.
    """
    if not results:
        return
    try:
        pd.DataFrame(results).to_excel(temp_file, index=False)
        logging.info(f"进度已保存到临时文件: {temp_file}")
    except Exception as e:
        logging.error(f"保存进度失败: {str(e)}")

def main():
    """Read candidate IPs from Excel, check them concurrently, and write the
    usable ones (sorted by response time) to a new Excel file."""
    start_time = datetime.now()
    logging.info("开始IP可用性检测...")

    # Load the candidate list. Raw strings keep the Windows paths intact:
    # the old "f:\ip-pool\available_ips.xlsx" literal turned \a into a BEL
    # control character, corrupting the output path.
    try:
        file_path = r"f:\ip-pool\ip-detail.xlsx"
        logging.info(f"正在读取文件: {file_path}")
        df = pd.read_excel(file_path, header=None, names=['ip'])
        total_ips = len(df)
        logging.info(f"成功读取 {total_ips} 个IP地址")

        # Drop empty / NaN / non-string entries and trim whitespace
        valid_ips = [
            row['ip'].strip()
            for _, row in df.iterrows()
            if row['ip'] and not pd.isna(row['ip']) and isinstance(row['ip'], str)
        ]

        logging.info(f"过滤后有效IP地址: {len(valid_ips)} 个")
    except Exception as e:
        logging.error(f"读取文件失败: {e}")
        return

    # Queue of (index, total, ip) work items
    ip_queue = queue.Queue()
    for i, ip in enumerate(valid_ips):
        ip_queue.put((i, len(valid_ips), ip))

    # Shared results list and its guard
    results = []
    results_lock = threading.Lock()

    # Shared progress counter with its own lock
    progress = {'value': 0, 'lock': threading.Lock()}

    # Scale the worker pool with the workload, capped at 10 threads
    num_threads = min(10, len(valid_ips))
    threads = []

    logging.info(f"启动 {num_threads} 个工作线程...")

    for _ in range(num_threads):
        t = threading.Thread(target=worker, args=(ip_queue, results, results_lock, progress))
        t.daemon = True
        t.start()
        threads.append(t)

    # Checkpoint partial results periodically while workers run
    save_interval = 30  # seconds between checkpoints
    last_save_time = time.time()

    try:
        # Poll until the queue drains, checkpointing along the way
        while not ip_queue.empty():
            time.sleep(5)
            current_time = time.time()
            if current_time - last_save_time >= save_interval:
                save_progress(results)
                last_save_time = current_time

        for t in threads:
            t.join()

    except KeyboardInterrupt:
        logging.warning("检测被用户中断，正在保存已完成的结果...")
    finally:
        # Always persist whatever has been collected so far
        save_progress(results)

    # Snapshot under the lock: after a KeyboardInterrupt the daemon workers
    # may still be appending to the shared list.
    with results_lock:
        final_results = list(results)

    if final_results:
        # Fastest proxies first
        final_results.sort(key=lambda x: x['response_time'])

        result_df = pd.DataFrame(final_results)
        output_path = r"f:\ip-pool\available_ips.xlsx"
        result_df.to_excel(output_path, index=False)

        end_time = datetime.now()
        duration = (end_time - start_time).total_seconds()

        logging.info("\n🎉 检测完成！")
        logging.info(f"总共检查了 {len(valid_ips)} 个IP")
        logging.info(f"找到 {len(final_results)} 个可用IP")
        logging.info(f"检测耗时: {duration:.2f} 秒")
        logging.info(f"可用IP已保存到: {output_path}")

        # Show the ten fastest proxies
        logging.info("\n响应速度最快的10个IP:")
        for i, ip_info in enumerate(final_results[:10], 1):
            logging.info(f"{i}. {ip_info['ip']} - 响应时间: {ip_info['response_time']:.2f}秒")
    else:
        logging.info("\n检测完成，但没有找到可用的IP")

    # Remove the checkpoint file now that the final output exists
    temp_file = "temp_results.xlsx"
    if os.path.exists(temp_file):
        try:
            os.remove(temp_file)
            logging.info(f"临时文件已清理: {temp_file}")
        except Exception as e:
            logging.warning(f"清理临时文件失败: {str(e)}")

if __name__ == "__main__":
    main()