#!/usr/bin/env python3
"""
快速批量爬取启动器
基于 quick_task_config.yaml 的简化配置启动批量爬取任务
"""

import sys
import yaml
import argparse
from pathlib import Path

# Add this file's directory to the import search path so sibling modules
# (e.g. batch_crawl_launcher) can be imported when run as a script.
sys.path.append(str(Path(__file__).parent))

from batch_crawl_launcher import BatchCrawlLauncher

class QuickBatchLauncher:
    """Quick batch-crawl launcher.

    Expands the simplified settings in ``quick_task_config.yaml`` into a
    complete batch-crawl configuration file and hands it off to
    ``BatchCrawlLauncher`` for execution.
    """

    def __init__(self, quick_config_file: str = "config/quick_task_config.yaml"):
        # Path of the simplified, user-facing configuration file.
        self.quick_config_file = Path(quick_config_file)
        self.quick_config: dict = self.load_quick_config()

    def load_quick_config(self) -> dict:
        """Load the quick-config YAML file.

        Returns the parsed configuration dict. As this is a CLI entry point,
        any failure to read/parse the file is fatal: report and exit(1).
        """
        try:
            with open(self.quick_config_file, 'r', encoding='utf-8') as f:
                return yaml.safe_load(f)
        except Exception as e:
            print(f"❌ 快速配置文件加载失败: {e}")
            sys.exit(1)

    def generate_full_config(self) -> dict:
        """Expand the quick config into a full batch-crawl configuration.

        Expects ``quick_config`` to contain ``target_cities`` (each with
        ``name``/``code``/``cookie_file``), ``target_categories`` and
        ``crawl_params`` (``pages_per_category``, ``items_per_page``,
        ``max_daily_limit``). Raises KeyError if any of these is missing.

        Returns a dict with per-city task definitions, per-city cookie
        bindings and default safety/error/data/monitoring/plan/log sections.
        """
        cities = self.quick_config['target_cities']
        categories = self.quick_config['target_categories']
        params = self.quick_config['crawl_params']

        time_slots = ["morning", "afternoon", "evening"]

        # The expected item count is identical for every city, so compute it
        # once instead of per iteration.
        expected_count = (len(categories)
                          * params['pages_per_category']
                          * params['items_per_page'])
        total_expected = expected_count * len(cities)

        # One task entry per city; cities are round-robined over time slots.
        city_tasks = []
        for i, city in enumerate(cities):
            city_tasks.append({
                'city_name': city['name'],
                'city_code': city['code'],
                'cookie_file': city['cookie_file'],
                # Copy so later mutation of one task cannot affect the others.
                'target_categories': categories.copy(),
                'max_pages_per_category': params['pages_per_category'],
                'expected_data_count': expected_count,
                'execution_time_slot': time_slots[i % len(time_slots)]
            })

        # Human-readable task summary metadata.
        city_names = [city['name'] for city in cities]
        task_info = {
            'name': f"{'+'.join(city_names)}专项爬取",
            'description': f"{len(cities)}个城市 × {len(categories)}个品类 × {params['pages_per_category']}页深度爬取",
            'target_data_count': total_expected,
            'estimated_duration': f"{len(cities) * 3}-{len(cities) * 4}小时",
            'created_date': "auto-generated"
        }

        # One dedicated cookie per city, keyed cookie1..cookieN (1-based).
        cookie_config = {}
        for i, city in enumerate(cities, 1):
            cookie_id = f"cookie{i}"
            cookie_config[cookie_id] = {
                'file_path': city['cookie_file'],
                'description': f"{city['name']}专用Cookie",
                'daily_limit': params['max_daily_limit'],
                'bound_city': city['name']
            }

        # Assemble the complete configuration from the generated parts plus
        # the built-in defaults.
        full_config = {
            'task_info': task_info,
            'city_tasks': city_tasks,
            'cookie_config': cookie_config,
            'safety_config': self.get_default_safety_config(),
            'error_handling': self.get_default_error_handling(),
            'data_management': self.get_default_data_management(),
            'monitoring': self.get_default_monitoring(),
            'execution_plan': self.get_default_execution_plan(),
            'logging': self.get_default_logging()
        }

        return full_config

    def get_default_safety_config(self) -> dict:
        """Return default pacing delays (seconds) between requests/pages/categories/cities."""
        return {
            'request_intervals': {
                'base_delay': 120,
                'random_factor': 0.5,
                'min_delay': 90,
                'max_delay': 180
            },
            'page_intervals': {
                'base_delay': 45,
                'random_factor': 0.3,
                'min_delay': 30,
                'max_delay': 60
            },
            'category_intervals': {
                'base_delay': 600,
                'random_factor': 0.2,
                'min_delay': 480,
                'max_delay': 720
            },
            'city_intervals': {
                'base_delay': 1800,
                'random_factor': 0.3,
                'min_delay': 1200,
                'max_delay': 2400
            }
        }

    def get_default_error_handling(self) -> dict:
        """Return default error-handling thresholds (403s, captcha, retries, success rate)."""
        return {
            'error_403': {
                'max_consecutive': 3,
                'cooling_time': 7200,
                'retry_after_cooling': True
            },
            'captcha': {
                'max_per_session': 2,
                'manual_solve_timeout': 300
            },
            'network_error': {
                'max_retries': 3,
                'retry_delay': 60
            },
            'success_rate': {
                'warning_threshold': 0.8,
                'critical_threshold': 0.6,
                'stop_threshold': 0.5
            }
        }

    def get_default_data_management(self) -> dict:
        """Return default data directories, dedup and validation settings."""
        return {
            'history_data_dir': "D:/AI编程垂直小流量商业化/crawl_test/重构/文件",
            'output_data_dir': "data/batch_crawl_output",
            'deduplication': {
                'enabled': True,
                'check_fields': ["shop_name", "address", "phone"],
                'similarity_threshold': 0.9
            },
            'validation': {
                'min_fields_required': 5,
                'required_fields': ["shop_name", "category", "city"]
            }
        }

    def get_default_monitoring(self) -> dict:
        """Return default real-time monitoring, reporting and alert settings."""
        return {
            'real_time': {
                'update_interval': 30,
                'display_progress': True,
                'log_level': "INFO"
            },
            'reporting': {
                'generate_hourly_report': True,
                'generate_final_report': True,
                'include_error_analysis': True
            },
            'alerts': {
                'enable_console_alerts': True,
                'enable_file_alerts': True,
                'alert_on_high_error_rate': True,
                'alert_on_low_success_rate': True
            }
        }

    def get_default_execution_plan(self) -> dict:
        """Return the default sequential schedule and fallback behavior."""
        return {
            'schedule': {
                'start_time': "09:00",
                'city1_slot': "09:00-13:00",
                'break_time': "13:00-15:00",
                'city2_slot': "15:00-19:00"
            },
            'mode': "sequential",
            'auto_pause_on_error': True,
            'manual_intervention': True,
            'fallback': {
                'reduce_pages_on_error': True,
                'skip_problematic_categories': True,
                'emergency_stop_enabled': True
            }
        }

    def get_default_logging(self) -> dict:
        """Return default log level, format, file targets and rotation policy."""
        return {
            'level': "INFO",
            'format': "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            'files': {
                'main_log': "logs/batch_crawl_main.log",
                'error_log': "logs/batch_crawl_errors.log",
                'progress_log': "logs/batch_crawl_progress.log"
            },
            'rotation': {
                'max_size': "50MB",
                'backup_count': 5
            }
        }

    def save_generated_config(self, config: dict) -> str:
        """Write *config* as YAML to the generated config file.

        Returns the path of the written file; exits the process on failure.
        """
        config_file = "config/generated_batch_config.yaml"

        try:
            # Create the config/ directory if missing — otherwise open()
            # fails with FileNotFoundError on a fresh checkout.
            Path(config_file).parent.mkdir(parents=True, exist_ok=True)
            with open(config_file, 'w', encoding='utf-8') as f:
                yaml.dump(config, f, default_flow_style=False, allow_unicode=True, indent=2)

            print(f"✅ 配置文件已生成: {config_file}")
            return config_file

        except Exception as e:
            print(f"❌ 配置文件生成失败: {e}")
            sys.exit(1)

    def print_task_summary(self) -> None:
        """Print a human-readable summary of the configured crawl task."""
        cities = self.quick_config['target_cities']
        categories = self.quick_config['target_categories']
        params = self.quick_config['crawl_params']

        print("\n" + "="*60)
        print("📋 快速批量爬取任务摘要")
        print("="*60)

        print(f"🏙️ 目标城市: {', '.join([city['name'] for city in cities])}")
        print(f"🍽️ 目标品类: {', '.join(categories)}")
        print(f"📊 爬取深度: 每品类 {params['pages_per_category']} 页")

        total_expected = len(cities) * len(categories) * params['pages_per_category'] * params['items_per_page']
        print(f"📈 预期数据: {total_expected} 条")
        print(f"⏱️ 预估时间: {len(cities) * 3}-{len(cities) * 4} 小时")

        print("\n🍪 Cookie分配:")
        for city in cities:
            print(f"  {city['name']}: {city['cookie_file']}")

        print("="*60)

    def run(self) -> int:
        """Generate the full config, save it, and launch the batch crawl.

        Returns the launcher's exit code, or 1 on any failure here.
        """
        try:
            # Show the operator what is about to run.
            self.print_task_summary()

            # Expand the quick config into a complete configuration.
            print("\n🔧 正在生成完整配置...")
            full_config = self.generate_full_config()

            # Persist it so BatchCrawlLauncher can load it from disk.
            config_file = self.save_generated_config(full_config)

            # Delegate actual crawling to the full launcher.
            print("🚀 启动批量爬取任务...")
            launcher = BatchCrawlLauncher(config_file)

            return launcher.run()

        except Exception as e:
            print(f"❌ 快速启动失败: {e}")
            return 1

def main():
    """Parse command-line arguments and run the quick batch launcher."""
    arg_parser = argparse.ArgumentParser(description='快速批量爬取启动器')
    arg_parser.add_argument(
        '--config',
        default='config/quick_task_config.yaml',
        help='快速配置文件路径',
    )
    options = arg_parser.parse_args()
    return QuickBatchLauncher(options.config).run()

if __name__ == "__main__":
    sys.exit(main())
