#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
验证三个关键优化是否正确实现
"""

def verify_timeout_optimization():
    """Verify that the network-timeout optimization is in place.

    Reads ``src/crawler/browser_manager.py`` and checks that the expected
    90-second timeout settings are present.

    Returns:
        bool: True only if every check passes; False on any failed check
        or if the file cannot be read.
    """
    print("1️⃣ 验证网络超时优化...")

    try:
        # Read the browser manager source so we can grep for the settings.
        with open('src/crawler/browser_manager.py', 'r', encoding='utf-8') as f:
            content = f.read()

        # BUG FIX: the original code tested `'timeout=90000' in content`
        # for BOTH the navigation and the network-wait checks, so the two
        # could never disagree and the third check added no information.
        # Use the occurrence count so the third check verifies that both
        # call sites were updated (assumes each setting appears as its own
        # 'timeout=90000' occurrence — TODO confirm against
        # browser_manager.py).
        checks = [
            ('页面默认超时', 'set_default_timeout(90000)' in content),
            ('页面导航超时', 'timeout=90000' in content),
            ('网络等待超时', content.count('timeout=90000') >= 2),
        ]

        for check_name, result in checks:
            status = "✅" if result else "❌"
            print(f"   {status} {check_name}: {'已优化' if result else '未优化'}")

        return all(result for _, result in checks)

    except Exception as e:
        # Best-effort check: report the failure instead of crashing.
        print(f"   ❌ 检查失败: {e}")
        return False


def verify_date_filter():
    """Verify the date-filter feature.

    Checks that the filter module file exists and that it is wired into
    ``src/crawler/crawler_engine.py``.

    Returns:
        bool: True only if the module exists and every integration marker
        is found; False otherwise (including when a file is unreadable).
    """
    print("\n2️⃣ 验证日期过滤功能...")

    try:
        import os
        has_module = os.path.exists('src/utils/date_filter.py')
        print(f"   ✅ 日期过滤模块: {'存在' if has_module else '不存在'}")

        # Load the engine source to grep for the integration points.
        with open('src/crawler/crawler_engine.py', 'r', encoding='utf-8') as f:
            engine_src = f.read()

        expected_markers = (
            ('导入日期过滤器', 'from src.utils.date_filter import DateFilter'),
            ('初始化日期过滤器', 'self.date_filter = DateFilter'),
            ('智能分页控制器', 'SmartPaginationController'),
            ('时间过滤调用', 'filter_data_by_date'),
        )

        all_found = True
        for check_name, marker in expected_markers:
            found = marker in engine_src
            all_found = all_found and found
            status = "✅" if found else "❌"
            print(f"   {status} {check_name}: {'已集成' if found else '未集成'}")

        return has_module and all_found

    except Exception as e:
        # Best-effort check: report the failure instead of crashing.
        print(f"   ❌ 检查失败: {e}")
        return False


def verify_csv_export():
    """Verify the CSV-export feature.

    Checks that the exporter module exists, ensures the ``output``
    directory is present (creating it on demand as a side effect), and
    verifies the integration markers in ``src/crawler/crawler_engine.py``.

    Returns:
        bool: True only if all checks pass; False otherwise (including
        when a file is unreadable).
    """
    print("\n3️⃣ 验证CSV导出功能...")

    try:
        import os
        has_exporter = os.path.exists('src/utils/csv_exporter.py')
        print(f"   ✅ CSV导出模块: {'存在' if has_exporter else '不存在'}")

        # Create-on-demand: after this branch the directory always exists,
        # so the flag only stays False if makedirs itself raises.
        has_output_dir = os.path.exists('output')
        if not has_output_dir:
            os.makedirs('output', exist_ok=True)
            has_output_dir = True
        print(f"   ✅ 输出目录: {'存在' if has_output_dir else '不存在'}")

        # Load the engine source to grep for the integration points.
        with open('src/crawler/crawler_engine.py', 'r', encoding='utf-8') as f:
            engine_src = f.read()

        expected_markers = (
            ('导入CSV导出器', 'from src.utils.csv_exporter import CSVExporter'),
            ('初始化CSV导出器', 'self.csv_exporter = CSVExporter'),
            ('数据显示管理器', 'DataDisplayManager'),
            ('CSV导出调用', 'export_to_csv'),
        )

        all_found = True
        for check_name, marker in expected_markers:
            found = marker in engine_src
            all_found = all_found and found
            status = "✅" if found else "❌"
            print(f"   {status} {check_name}: {'已集成' if found else '未集成'}")

        return has_exporter and has_output_dir and all_found

    except Exception as e:
        # Best-effort check: report the failure instead of crashing.
        print(f"   ❌ 检查失败: {e}")
        return False


def verify_pagination_logic():
    """Verify the pagination-logic refactor.

    Greps ``src/crawler/crawler_engine.py`` for markers of the reworked
    pagination flow (smart control, time filtering, state reset, stats).

    Returns:
        bool: True only if every marker is found; False otherwise
        (including when the file is unreadable).
    """
    print("\n4️⃣ 验证分页逻辑重构...")

    try:
        # Load the engine source to grep for the refactor markers.
        with open('src/crawler/crawler_engine.py', 'r', encoding='utf-8') as f:
            engine_src = f.read()

        expected_markers = (
            ('智能分页控制', 'should_continue_pagination'),
            ('时间过滤集成', '时间过滤检查'),
            ('分页状态重置', 'pagination_controller.reset()'),
            ('过滤统计输出', 'filter_stats'),
        )

        all_found = True
        for check_name, marker in expected_markers:
            found = marker in engine_src
            all_found = all_found and found
            status = "✅" if found else "❌"
            print(f"   {status} {check_name}: {'已实现' if found else '未实现'}")

        return all_found

    except Exception as e:
        # Best-effort check: report the failure instead of crashing.
        print(f"   ❌ 检查失败: {e}")
        return False


def main():
    """Run all four verification checks and print a summary report.

    Executes each ``verify_*`` function in order, prints a per-item
    pass/fail line, an overall pass rate, and usage hints. Returns None.
    """
    print("🔍 验证三个关键优化的实现状态")
    print("=" * 60)

    # Names are paired positionally with the check functions below.
    labelled_checks = [
        ("网络超时优化", verify_timeout_optimization),
        ("日期过滤功能", verify_date_filter),
        ("CSV导出功能", verify_csv_export),
        ("分页逻辑重构", verify_pagination_logic),
    ]

    # Run every check first; the summary is printed afterwards.
    outcomes = [(label, check()) for label, check in labelled_checks]

    print("\n" + "=" * 60)
    print("📊 验证结果总结:")

    for idx, (label, passed) in enumerate(outcomes, start=1):
        status = "✅ 通过" if passed else "❌ 失败"
        print(f"   {idx}. {label}: {status}")

    passed_count = sum(passed for _, passed in outcomes)
    total = len(outcomes)
    success_rate = passed_count / total * 100

    print(f"\n🎯 总体验证结果: {passed_count}/{total} 项通过 ({success_rate:.1f}%)")

    if success_rate == 100:
        print("🎉 所有优化都已正确实现!")
    elif success_rate >= 75:
        print("✅ 大部分优化已实现，少数需要调整")
    else:
        print("⚠️ 多项优化需要进一步完善")

    print("\n📋 使用说明:")
    print("   - 运行 python demo_full_crawler.py --mode full 测试完整功能")
    print("   - 运行 python test_optimized_crawler.py --test-type single 测试单城市")
    print("   - 查看 output/ 目录获取CSV导出文件")


# Run the full verification suite when executed as a script.
if __name__ == "__main__":
    main()