#!/usr/bin/env python3
"""
ScraperController使用示例
Example usage of ScraperController

演示如何使用ScraperController进行数据爬取
Demonstrates how to use ScraperController for data scraping
"""

import json
import os
import sys
import time
from datetime import datetime

# Add the project root directory to the Python path so the local
# `puxiansheng_scraper` package (imported below) resolves when this
# file is executed directly as a script.
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

def main():
    """Run the scraper end-to-end and report final statistics.

    Makes real HTTP requests via ScraperController.run(), saves the run
    statistics to a timestamped JSON file, and prints a summary.

    Returns:
        int: 0 on success, 1 on user cancellation (Ctrl+C) or any error.
    """
    print("ScraperController Usage Example")
    print("=" * 50)

    try:
        # Imported lazily (after the sys.path tweak at module level) so a
        # missing package produces a readable error instead of an import
        # failure at file load time.
        from puxiansheng_scraper.scraper_controller import ScraperController

        # Initialize the controller with its default configuration.
        print("Initializing ScraperController...")
        controller = ScraperController()

        # Fetch the config values once and reuse them below (the original
        # queried cities/business types twice).
        cities = controller.config.get_cities()
        business_types = controller.config.get_business_types()

        # Show the effective configuration.
        print(f"Target cities: {cities}")
        print(f"Target business types: {business_types}")
        print(f"Request delay: {controller.config.get_request_delay()} seconds")
        print(f"Output directory: {controller.config.get('storage.output_dir')}")
        print(f"Output format: {controller.config.get('storage.format')}")
        print()

        # Every (city, business type) pair is one work unit.
        print("Initial statistics:")
        print(f"  Total combinations to process: {len(cities) * len(business_types)}")
        print()

        print("Starting scraper...")
        print("Note: This will make actual HTTP requests to the target website.")
        print("Press Ctrl+C to cancel if you don't want to proceed.")

        # 5-second countdown so the user can abort before any network traffic.
        for i in range(5, 0, -1):
            print(f"Starting in {i} seconds... (Ctrl+C to cancel)")
            time.sleep(1)

        print("\nStarting scraper execution...")

        # Run the scraper; blocks until all combinations are processed.
        final_stats = controller.run()

        # Show the final statistics.
        print("\n" + "=" * 50)
        print("SCRAPING COMPLETED")
        print("=" * 50)

        print(f"Processing time: {final_stats['processing_time']:.2f} seconds")
        print(f"Total combinations: {final_stats['total_combinations']}")
        print(f"Successful combinations: {final_stats['successful_combinations']}")
        print(f"Failed combinations: {final_stats['failed_combinations']}")
        print(f"Success rate: {final_stats.get('success_rate', 0):.2%}")
        print()

        print(f"Total pages processed: {final_stats['total_pages_processed']}")
        print(f"Total items found: {final_stats['total_items_found']}")
        print(f"Total items saved: {final_stats['total_items_saved']}")
        print(f"Deduplication rate: {final_stats.get('dedup_rate', 0):.2%}")
        print()

        # Persist the statistics to a timestamped JSON file.
        stats_file = f"scraper_stats_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
        with open(stats_file, 'w', encoding='utf-8') as f:
            # Convert datetime objects to ISO strings for JSON serialization.
            stats_for_json = final_stats.copy()
            if stats_for_json['start_time']:
                stats_for_json['start_time'] = stats_for_json['start_time'].isoformat()
            if stats_for_json['end_time']:
                stats_for_json['end_time'] = stats_for_json['end_time'].isoformat()

            json.dump(stats_for_json, f, ensure_ascii=False, indent=2)

        print(f"Statistics saved to: {stats_file}")

        # Show where the scraped data itself was written.
        output_dir = controller.config.get('storage.output_dir', './data')
        print(f"Scraped data saved to: {output_dir}")

        return 0

    except KeyboardInterrupt:
        print("\nScraping cancelled by user.")
        return 1
    except Exception as e:
        # Broad catch is intentional at this script boundary: report and
        # return a nonzero exit code instead of a traceback.
        print(f"Error: {e}")
        return 1

def demo_mode():
    """Demo mode: initialize the controller and print its configuration
    and statistics/progress structures without doing any real scraping.

    Returns:
        int: 0 on success, 1 on any error.
    """
    print("ScraperController Demo Mode")
    print("=" * 50)

    try:
        from puxiansheng_scraper.scraper_controller import ScraperController

        controller = ScraperController()
        cfg = controller.config

        # Dump the effective configuration values.
        print("Configuration:")
        print(f"  Cities: {cfg.get_cities()}")
        print(f"  Business types: {cfg.get_business_types()}")
        print(f"  Request delay: {cfg.get_request_delay()} seconds")
        print(f"  Request timeout: {cfg.get_request_timeout()} seconds")
        print(f"  Retry times: {cfg.get_retry_times()}")
        print(f"  Output format: {cfg.get('storage.format')}")
        print(f"  Output directory: {cfg.get('storage.output_dir')}")
        print(f"  Enable deduplication: {cfg.get('storage.enable_dedup')}")

        # Describe the shape of the statistics dict (summarize nested dicts).
        print("\nStatistics structure:")
        for name, val in controller.get_statistics().items():
            if isinstance(val, dict):
                print(f"  {name}: {type(val).__name__} with {len(val)} items")
            else:
                print(f"  {name}: {val} ({type(val).__name__})")

        # Current progress snapshot.
        print(f"\nProgress: {controller.get_progress()}")

        print("\nDemo completed successfully!")
        return 0

    except Exception as e:
        print(f"Demo failed: {e}")
        return 1

if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "--demo":
        sys.exit(demo_mode())
    else:
        print("Use --demo flag for demo mode (no actual scraping)")
        print("Run without flags for actual scraping")
        print()
        sys.exit(main())
