#!/usr/bin/env python3
"""
大众点评爬虫 P4 版本 - 配置管理模块
负责加载和管理所有配置信息
"""

import os
import yaml
from pathlib import Path
from typing import Dict, Any, Optional

class ConfigManager:
    """Configuration manager for the crawler.

    Lazily loads ``cities.yaml`` from *config_dir*, caches the parsed
    mapping, and exposes typed accessors for its sections. Any load or
    parse failure degrades to an empty default config instead of raising.
    """

    def __init__(self, config_dir: Optional[str] = None):
        """Initialize the manager.

        Args:
            config_dir: Directory containing ``cities.yaml``. Defaults to
                the directory of this module.
        """
        if config_dir is None:
            config_dir = Path(__file__).parent
        self.config_dir = Path(config_dir)
        # Lazy caches; None means "not loaded yet".
        self._cities_config: Optional[Dict[str, Any]] = None
        self._app_config = None  # reserved for future use, currently unused

    def load_cities_config(self) -> Dict[str, Any]:
        """Load and cache ``cities.yaml``.

        Returns:
            The parsed top-level mapping. If the file is missing, unreadable,
            empty, or its top level is not a mapping, returns a default of
            ``{'cities': {}, 'crawl_settings': {}}``.
        """
        if self._cities_config is None:
            cities_file = self.config_dir / "cities.yaml"
            try:
                print(f"[CONFIG] 尝试加载配置文件: {cities_file}")
                print(f"[CONFIG] 文件是否存在: {cities_file.exists()}")

                with open(cities_file, 'r', encoding='utf-8') as f:
                    content = f.read()
                print(f"[CONFIG] 文件内容长度: {len(content)} 字符")

                parsed = yaml.safe_load(content)
                print(f"[CONFIG] YAML解析结果: {type(parsed)}")

                # Guard against an empty file (None) AND a non-mapping top
                # level (e.g. a YAML list/scalar) — either would break the
                # `.get()` calls in the accessors below.
                if not isinstance(parsed, dict):
                    print("[CONFIG] ⚠️ YAML解析返回None，使用默认配置")
                    parsed = {'cities': {}, 'crawl_settings': {}}
                self._cities_config = parsed

            except Exception as e:
                # Best-effort loading: log and fall back to an empty default.
                print(f"[CONFIG] ❌ 配置加载失败: {e}")
                self._cities_config = {'cities': {}, 'crawl_settings': {}}

        return self._cities_config

    def get_cities(self) -> Dict[str, Dict[str, Any]]:
        """Return the city table (name -> city info), or {} if absent."""
        config = self.load_cities_config()
        return config.get('cities', {})

    def get_city_info(self, city_name: str) -> Optional[Dict[str, Any]]:
        """Return the info mapping for *city_name*, or None if unknown."""
        cities = self.get_cities()
        return cities.get(city_name)

    def get_validated_cities(self) -> Dict[str, Dict[str, Any]]:
        """Return only the cities whose ``validated`` flag is truthy."""
        cities = self.get_cities()
        return {name: info for name, info in cities.items() if info.get('validated', False)}

    def get_crawl_settings(self) -> Dict[str, Any]:
        """Return the ``crawl_settings`` section, or {} if absent."""
        config = self.load_cities_config()
        return config.get('crawl_settings', {})

    def get_quality_standards(self) -> Dict[str, Any]:
        """Return the ``quality_standards`` section, or {} if absent."""
        config = self.load_cities_config()
        return config.get('quality_standards', {})

    def update_city_validation(self, city_name: str, validated: bool):
        """Persist a city's ``validated`` flag back to ``cities.yaml``.

        No-op when *city_name* is not present (or the config has no
        ``cities`` section). On success the cached config is invalidated so
        the next read reflects the file on disk.
        """
        cities_file = self.config_dir / "cities.yaml"
        config = self.load_cities_config()
        # Defensive .get: a loaded YAML without a 'cities' key must not
        # raise KeyError here (the other accessors already guard this way).
        cities = config.get('cities') or {}

        if city_name in cities:
            cities[city_name]['validated'] = validated

            with open(cities_file, 'w', encoding='utf-8') as f:
                yaml.dump(config, f, default_flow_style=False, allow_unicode=True, indent=2)

            # Drop the cache so the next access re-reads the updated file.
            self._cities_config = None

# Application configuration, composed from named sections so each area
# can be read and tweaked independently.

# Delay schedule in seconds (tuned against 403 rate-limiting).
_DELAY_SECONDS = {
    'city_switch': 1800,      # 30 min between cities
    'category_switch': 300,   # 5 min between categories
    'page_turn': 90,          # 1.5 min between pages
    'normal': 60,             # 1 min baseline delay
    'error_recovery': 300,    # 5 min after an error
}

# Browser launch settings.
_BROWSER_SETTINGS = {
    'headless': False,
    'user_agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
    'viewport': {'width': 1920, 'height': 1080},
    'args': ['--no-sandbox', '--disable-blink-features=AutomationControlled'],
}

# Output data settings.
_DATA_SETTINGS = {
    'output_file': 'dianping_data_p4.csv',
    'core_fields': ['city', 'primary_category', 'secondary_category', 'shop_name', 'avg_price'],
    'backup_interval': 3600,  # back up once per hour
}

# Logging settings.
_LOGGING_SETTINGS = {
    'level': 'INFO',
    'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    'file': 'logs/crawler_p4.log',
}

# Stability / error-recovery settings.
_STABILITY_SETTINGS = {
    'max_consecutive_errors': 5,
    'error_recovery_delays': [30, 60, 120, 300, 600],
    'cookie_refresh_interval': 3600,
    'checkpoint_interval': 3600,
}

APP_CONFIG = {
    # Cookie string sourced from the environment (may be empty).
    'cookie_string': os.getenv('DIANPING_COOKIE', ''),
    'delays': _DELAY_SECONDS,
    'browser': _BROWSER_SETTINGS,
    'data': _DATA_SETTINGS,
    'logging': _LOGGING_SETTINGS,
    'stability': _STABILITY_SETTINGS,
}

def get_config() -> Dict[str, Any]:
    """Return the application configuration mapping.

    Note: this returns the shared module-level APP_CONFIG object itself,
    so any mutation by a caller is visible process-wide.
    """
    return APP_CONFIG

def get_cookie_string() -> str:
    """Return the Dianping cookie string, trying multiple sources in order.

    Resolution order:
      1. ``DIANPING_COOKIE`` environment variable (stripped).
      2. First non-empty, non-comment line of ``cookie.txt`` next to this module.
      3. A hard-coded test cookie (development/testing only).

    Returns:
        A non-empty cookie string.
    """
    # 1. Environment variable takes precedence.
    cookie = os.getenv('DIANPING_COOKIE')
    if cookie and cookie.strip():
        print(f"[CONFIG] 从环境变量加载Cookie，长度: {len(cookie)} 字符")
        return cookie.strip()

    # 2. Fall back to cookie.txt. Skip blank lines and '#' comment lines and
    #    return the first real cookie line. (The previous implementation
    #    tested startswith('#') on the WHOLE file, so a leading comment line
    #    caused the actual cookie below it to be ignored.)
    try:
        cookie_file = Path(__file__).parent / "cookie.txt"
        if cookie_file.exists():
            with open(cookie_file, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if line and not line.startswith('#'):
                        print(f"[CONFIG] 从cookie.txt加载Cookie，长度: {len(line)} 字符")
                        return line
    except Exception as e:
        # Best-effort: any I/O/decoding problem falls through to the test cookie.
        print(f"[CONFIG] 读取cookie.txt失败: {e}")

    # 3. Last resort: a test cookie (real crawling will likely be blocked).
    print("[CONFIG] ⚠️ 使用测试Cookie，实际爬取可能失败")
    return "fspop=test; _lxsdk_cuid=test-crawler-p4; _lxsdk=test-crawler-p4"
