# -*- coding: utf-8 -*-
"""
完整爬虫功能演示脚本
展示多城市+多业态+分页处理的组合功能
"""

import asyncio
import logging
from datetime import datetime
from src.crawler.crawler_engine import CrawlerEngine
from src.data.data_storage import DataStorageManager
from src.utils.excel_exporter import ExcelExporter


async def demo_full_crawler():
    """Demo of the full crawl pipeline.

    Runs every city x business-type combination with pagination, prints a
    per-city summary, persists the merged results to the database, and
    exports them to an Excel file. Returns True on success, False on error.
    """

    # Basic console logging for the demo run.
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] %(message)s'
    )
    logger = logging.getLogger(__name__)

    # Wire up the three collaborators: crawler, persistence, export.
    engine = CrawlerEngine()
    storage = DataStorageManager()
    exporter = ExcelExporter()

    try:
        await engine.start(headless=True)
        logger.info("🚀 爬虫引擎启动成功")

        # Crawl configuration.
        cities = ['深圳', '杭州', '北京']  # three target cities
        business_types = ['餐饮', '零售', '娱乐']  # three business categories
        max_pages = 2  # page cap per city/business-type combination

        logger.info(f"📋 开始爬取: {len(cities)}个城市 × {len(business_types)}种业态 × {max_pages}页")

        # Core entry point: multi-city + multi-business-type + pagination.
        results = await engine.crawl_multiple_cities_and_business_types(
            cities=cities,
            business_types=business_types,
            max_pages=max_pages
        )

        # Aggregate the per-combination outcomes.
        collected = []
        ok_combos = 0
        combo_total = len(cities) * len(business_types)

        banner = "=" * 80
        print("\n" + banner)
        print("📊 爬取结果汇总")
        print(banner)

        for city, city_result in results['results'].items():
            print(f"\n🏙️ {city}市:")
            print(f"   总数据量: {city_result['total_data_count']} 条")
            print(f"   成功率: {city_result['success_rate']:.1f}%")

            for biz, outcome in city_result['business_results'].items():
                if outcome['success']:
                    print(f"   ✅ {biz}: {outcome['data_count']} 条")
                    collected.extend(outcome['data'])
                    ok_combos += 1
                else:
                    print(f"   ❌ {biz}: 失败 - {outcome.get('error', '未知错误')}")

        print(f"\n📈 总体统计:")
        print(f"   - 总数据量: {len(collected)} 条")
        print(f"   - 成功组合: {ok_combos}/{combo_total}")
        print(f"   - 总体成功率: {results['overall_success_rate']:.1f}%")

        # Persist only when the crawl actually produced data.
        if collected:
            logger.info("💾 开始保存数据到数据库...")

            # Register a task record so the saved rows are attributable.
            task_name = f"多城市多业态爬取_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            task_id = storage.create_crawl_task(task_name, "多城市", "多业态")

            save_stats = storage.save_leads_data(collected, task_id)

            print(f"\n💾 数据保存结果:")
            print(f"   - 新增: {save_stats['new']} 条")
            print(f"   - 更新: {save_stats['updated']} 条")
            print(f"   - 最终: {save_stats['final_count']} 条")

            # Export to Excel only when at least one row survived the save.
            if save_stats['final_count'] > 0:
                stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
                excel_file = await exporter.export_to_excel(
                    collected, f"多城市多业态数据_{stamp}.xlsx"
                )
                print(f"   - Excel文件: {excel_file}")

            # Close out the task record.
            storage.update_crawl_task_status(
                task_id, 'completed',
                f"成功获取 {save_stats['final_count']} 条有效数据"
            )

        print("\n🎉 爬取任务完成!")
        return True

    except Exception as e:
        logger.error(f"❌ 爬取失败: {e}")
        return False
    finally:
        # Always release browser resources, even on failure.
        await engine.stop()


async def demo_single_city_multi_business():
    """Demo: crawl several business types within a single city (Shenzhen).

    Returns True when the crawl completes, False if any exception occurs.
    """

    logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s')
    logger = logging.getLogger(__name__)

    engine = CrawlerEngine()

    try:
        await engine.start(headless=True)
        logger.info("🚀 爬虫引擎启动成功")

        # Single-city, multi-business-type entry point (one page per type).
        results = await engine.crawl_multiple_business_types(
            city="深圳",
            business_types=['餐饮', '零售', '娱乐'],
            max_pages=1
        )

        print(f"\n📊 深圳市多业态爬取结果:")
        print(f"   总数据量: {results['total_data_count']} 条")
        print(f"   成功率: {results['success_rate']:.1f}%")

        # One status line per business type.
        for biz, outcome in results['results'].items():
            if outcome['success']:
                line = f"   ✅ {biz}: {outcome['data_count']} 条"
            else:
                line = f"   ❌ {biz}: 失败"
            print(line)

        return True

    except Exception as e:
        logger.error(f"❌ 爬取失败: {e}")
        return False
    finally:
        # Shut the engine down regardless of outcome.
        await engine.stop()


async def demo_multi_city_single_business():
    """Demo: crawl one business type ('零售'/retail) across several cities.

    Returns True when the crawl completes, False if any exception occurs.
    """

    logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s')
    logger = logging.getLogger(__name__)

    engine = CrawlerEngine()

    try:
        await engine.start(headless=True)
        logger.info("🚀 爬虫引擎启动成功")

        # Multi-city, single-business-type entry point (one page per city).
        results = await engine.crawl_multiple_cities(
            cities=['深圳', '杭州', '北京'],
            business_type='零售',
            max_pages=1
        )

        print(f"\n📊 多城市零售业态爬取结果:")
        print(f"   总数据量: {results['total_data_count']} 条")
        print(f"   成功率: {results['success_rate']:.1f}%")

        # One status line per city.
        for city, outcome in results['results'].items():
            if outcome['success']:
                line = f"   ✅ {city}: {outcome['data_count']} 条"
            else:
                line = f"   ❌ {city}: 失败"
            print(line)

        return True

    except Exception as e:
        logger.error(f"❌ 爬取失败: {e}")
        return False
    finally:
        # Shut the engine down regardless of outcome.
        await engine.stop()


if __name__ == "__main__":
    import argparse
    
    parser = argparse.ArgumentParser(description='完整爬虫功能演示')
    parser.add_argument('--mode', choices=['full', 'multi-business', 'multi-city'], 
                       default='full', help='演示模式')
    
    args = parser.parse_args()
    
    if args.mode == 'full':
        print("🚀 演示完整功能: 多城市+多业态+分页")
        success = asyncio.run(demo_full_crawler())
    elif args.mode == 'multi-business':
        print("🚀 演示单城市多业态功能")
        success = asyncio.run(demo_single_city_multi_business())
    elif args.mode == 'multi-city':
        print("🚀 演示多城市单业态功能")
        success = asyncio.run(demo_multi_city_single_business())
    
    exit(0 if success else 1)
