# -*- coding: utf-8 -*-

import json
import os
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional


@dataclass
class SpiderConfig:
    """Configuration for a single spider.

    Attributes:
        name: Unique spider name used for lookup in ConfigManager.
        site_name: Site identifier (used e.g. for output naming).
        allowed_domains: Domains the spider may crawl.
        concurrent_requests: Per-spider concurrency limit.
        download_delay: Delay between requests in seconds.
        custom_headers: Extra HTTP headers sent with every request.
        use_proxy: Whether this spider routes through the proxy pool.
        max_pages: Page cap; 0 means unlimited.
    """
    name: str
    site_name: str
    # default_factory gives each instance its own container; the previous
    # `List[str] = None` default contradicted the annotation and forced
    # None-checks on consumers.
    allowed_domains: List[str] = field(default_factory=list)
    concurrent_requests: int = 16
    download_delay: float = 1.0
    custom_headers: Dict[str, str] = field(default_factory=dict)
    use_proxy: bool = False
    max_pages: int = 0  # 0 means unlimited


@dataclass
class GlobalConfig:
    """Global (cross-spider) configuration with sensible defaults."""

    # Output / logging
    output_dir: str = "outputs"
    log_level: str = "CRITICAL"
    log_file: str = "spider.log"
    log_enabled: bool = False

    # Concurrency limits
    global_concurrent_requests: int = 32
    concurrent_requests_per_domain: int = 16
    concurrent_requests_per_ip: int = 16

    # Proxy settings. default_factory replaces the previous
    # `List[str] = None`, which contradicted the annotation.
    proxy_enabled: bool = False
    proxy_list: List[str] = field(default_factory=list)

    # Retry settings. The default retry codes mirror what
    # create_default_config used to install, so a config file that omits
    # them no longer leaves the attribute as None.
    retry_times: int = 3
    retry_http_codes: List[int] = field(
        default_factory=lambda: [500, 502, 503, 504, 408, 429]
    )

    # HTTP cache settings
    cache_enabled: bool = True
    cache_expiration: int = 3600  # seconds

    # Output file size / record limits
    jsonl_max_size: int = 1024 * 1024 * 1024  # 1 GB per JSONL file
    jsonl_max_records: int = 50000
    image_max_count: int = 10000
    csv_max_records: int = 100000


class ConfigManager:
    """Loads, persists and queries spider configuration.

    On construction the manager reads ``config_file`` if it exists;
    otherwise it creates a default configuration and writes it to that
    path as JSON.
    """

    def __init__(self, config_file: str = "spider_config.json"):
        self.config_file = Path(config_file)
        self.global_config = GlobalConfig()
        self.spider_configs: List[SpiderConfig] = []

        # Load an existing file, or materialise (and persist) the defaults.
        if self.config_file.exists():
            self.load_config()
        else:
            self.create_default_config()

    def load_config(self) -> None:
        """Load configuration from ``self.config_file``.

        Any read/parse failure falls back to the default configuration
        (which also rewrites the file).
        """
        try:
            with open(self.config_file, 'r', encoding='utf-8') as f:
                config_data = json.load(f)

            # Global section: only keys that already exist on GlobalConfig
            # are applied, so unknown keys in the file are ignored.
            for key, value in config_data.get('global', {}).items():
                if hasattr(self.global_config, key):
                    setattr(self.global_config, key, value)

            # Spider section: each entry must match SpiderConfig's fields;
            # an unexpected key raises TypeError and triggers the fallback.
            if 'spiders' in config_data:
                self.spider_configs = [
                    SpiderConfig(**spider_data)
                    for spider_data in config_data['spiders']
                ]

        except (OSError, ValueError, TypeError) as e:
            # OSError: unreadable file; ValueError: invalid JSON
            # (json.JSONDecodeError subclasses it); TypeError: malformed
            # spider entry. Narrowed from a blanket `except Exception`.
            print(f"加载配置文件失败: {e}")
            self.create_default_config()

    def save_config(self) -> None:
        """Serialise the current configuration to ``self.config_file``."""
        config_data = {
            'global': asdict(self.global_config),
            'spiders': [asdict(config) for config in self.spider_configs],
        }
        with open(self.config_file, 'w', encoding='utf-8') as f:
            json.dump(config_data, f, ensure_ascii=False, indent=2)

    def create_default_config(self) -> None:
        """Build the built-in site configurations and persist them."""
        # Default proxy list and retryable HTTP status codes.
        self.global_config.proxy_list = []
        self.global_config.retry_http_codes = [500, 502, 503, 504, 408, 429]

        # All bundled sites share the same conservative crawl profile.
        def _site(name: str, site_name: str, domains: List[str]) -> SpiderConfig:
            # Helper: one default spider entry (4 concurrent, 2s delay).
            return SpiderConfig(
                name=name,
                site_name=site_name,
                allowed_domains=domains,
                concurrent_requests=4,
                download_delay=2.0,
            )

        self.spider_configs = [
            # Beijing Fire Protection Association
            _site("beijingfire", "beijingfire_com",
                  ["beijingfire.com", "www.beijingfire.com"]),
            # London Fire Brigade
            _site("londonfire", "londonfire_gov_uk",
                  ["london-fire.gov.uk", "www.london-fire.gov.uk"]),
            # China Certification Center for Fire Products
            _site("cccf", "cccf_com_cn",
                  ["cccf.com.cn", "www.cccf.com.cn"]),
            # NIST
            _site("nist", "nist",
                  ["nist.gov", "www.nist.gov"]),
            # International Association of Fire Fighters
            _site("iaff", "iaff_org",
                  ["iaff.org", "www.iaff.org"]),
            # Society of Fire Protection Engineers
            _site("sfpe", "sfpe_org",
                  ["sfpe.org", "www.sfpe.org", "www.pathlms.com"]),
        ]
        self.save_config()

    def add_spider_config(self, config: SpiderConfig) -> None:
        """Append a spider configuration and persist immediately."""
        self.spider_configs.append(config)
        self.save_config()

    def get_spider_config(self, name: str) -> Optional[SpiderConfig]:
        """Return the spider config with the given name, or None.

        Annotation fixed to Optional: the previous `-> SpiderConfig`
        contradicted the `return None` fall-through.
        """
        for config in self.spider_configs:
            if config.name == name:
                return config
        return None

    def update_spider_config(self, name: str, **kwargs) -> bool:
        """Update fields of the named spider config.

        Only attributes that already exist on SpiderConfig are applied.
        Returns True (and persists) if the spider was found, else False.
        """
        for config in self.spider_configs:
            if config.name == name:
                for key, value in kwargs.items():
                    if hasattr(config, key):
                        setattr(config, key, value)
                self.save_config()
                return True
        return False

    def remove_spider_config(self, name: str) -> None:
        """Remove the named spider config (no-op if absent) and persist."""
        self.spider_configs = [
            config for config in self.spider_configs
            if config.name != name
        ]
        self.save_config()

    def list_spider_configs(self) -> List[str]:
        """Return the names of all registered spider configs."""
        return [config.name for config in self.spider_configs]

    def get_scrapy_settings(self) -> Dict[str, Any]:
        """Build the Scrapy settings dict from the global configuration.

        Note: SPIDER_MODULES and DOWNLOAD_DELAY are currently hard-coded
        rather than derived from the config file.
        """
        settings = {
            'BOT_NAME': 'fire_control_spider',
            'SPIDER_MODULES': ['fire_control_spider.sites.beijingfire', 'fire_control_spider.sites.londonfire', 'fire_control_spider.sites.cccf', 'fire_control_spider.sites.nist', 'fire_control_spider.sites.iaff', 'fire_control_spider.sites.sfpe', 'fire_control_spider.sites.nfpa'],
            'NEWSPIDER_MODULE': 'fire_control_spider.sites.beijingfire',
            'ROBOTSTXT_OBEY': False,

            # Concurrency
            'CONCURRENT_REQUESTS': self.global_config.global_concurrent_requests,
            'CONCURRENT_REQUESTS_PER_DOMAIN': self.global_config.concurrent_requests_per_domain,
            'CONCURRENT_REQUESTS_PER_IP': self.global_config.concurrent_requests_per_ip,

            # Download behaviour
            'DOWNLOAD_DELAY': 1,
            'DOWNLOAD_TIMEOUT': 180,
            'DOWNLOAD_DELAY_RANGE': (0.1, 0.5),  # media download delay range

            # Proxy
            'PROXY_ENABLED': self.global_config.proxy_enabled,
            'PROXY_LIST': self.global_config.proxy_list or [],

            # Retry. `or []` guards against a None loaded from an old
            # config file, which Scrapy's RetryMiddleware cannot handle.
            'RETRY_TIMES': self.global_config.retry_times,
            'RETRY_HTTP_CODES': self.global_config.retry_http_codes or [],

            # HTTP cache
            'HTTPCACHE_ENABLED': self.global_config.cache_enabled,
            'HTTPCACHE_EXPIRATION_SECS': self.global_config.cache_expiration,

            # Output limits
            'OUTPUT_DIR': Path(self.global_config.output_dir),
            'JSONL_MAX_SIZE': self.global_config.jsonl_max_size,
            'JSONL_MAX_RECORDS': self.global_config.jsonl_max_records,
            'IMAGE_MAX_COUNT': self.global_config.image_max_count,
            'CSV_MAX_RECORDS': self.global_config.csv_max_records,

            # Logging — fully silenced regardless of global_config values.
            'LOG_LEVEL': 'CRITICAL',
            'LOG_ENABLED': False,

            # Item pipelines (validation -> dedup -> media -> writer)
            'ITEM_PIPELINES': {
                'fire_control_spider.pipelines.ValidationPipeline': 100,
                'fire_control_spider.pipelines.DuplicatesPipeline': 200,
                'fire_control_spider.pipelines.MediaDownloadPipeline': 300,
                'fire_control_spider.pipelines.JsonlWriterPipeline': 400,
            },

            # Request fingerprint dedup
            'DUPEFILTER_CLASS': 'scrapy.dupefilters.RFPDupeFilter',

            # Downloader middlewares
            'DOWNLOADER_MIDDLEWARES': {
                'fire_control_spider.middlewares.ProxyMiddleware': 350,
                'fire_control_spider.middlewares.UserAgentMiddleware': 400,
                'fire_control_spider.middlewares.RetryMiddleware': 500,
            },

            # Disable all built-in extensions
            'EXTENSIONS': {
                'scrapy.extensions.telnet.TelnetConsole': None,
                'scrapy.extensions.logstats.LogStats': None,
                'scrapy.extensions.corestats.CoreStats': None,
                'scrapy.extensions.memusage.MemoryUsage': None,
                'scrapy.extensions.memdebug.MemoryDebugger': None,
                'scrapy.extensions.closespider.CloseSpider': None,
                'scrapy.extensions.feedexport.FeedExporter': None,
                'fire_control_spider.extensions.StatsExtension': None,
            }
        }

        return settings