"""
Configuration Manager for ArXiv Scraper Service

Manages configuration loading, validation, and environment-specific settings.
"""

import os
import logging
from pathlib import Path
from typing import Dict, Any, Optional
import yaml
import json
from dataclasses import dataclass, asdict


@dataclass
class ArxivConfig:
    """ArXiv API configuration.

    ``subjects`` defaults to None and is replaced in ``__post_init__`` with
    the standard subject list, so both ``ArxivConfig()`` and
    ``ArxivConfig(subjects=None)`` yield the same defaults.
    """
    oai_base_url: str = "http://export.arxiv.org/oai2"  # OAI-PMH endpoint; overridable via ARXIV_OAI_URL
    earliest_date: str = "2007-01-01"  # ISO date; overridable via ARXIV_EARLIEST_DATE
    # Annotated Optional[list] (was plain `list` with a None default, which
    # misstated the accepted type); None selects the default subject set.
    subjects: Optional[list] = None

    def __post_init__(self):
        # Fill in the default subject set when none was supplied.
        if self.subjects is None:
            self.subjects = [
                "math-ph",  # Mathematical Physics
                "math.AP",  # Analysis of PDEs
                "math.NA",  # Numerical Analysis
                "math.OC",  # Optimization and Control
                "physics.comp-ph",  # Computational Physics
                "cs.NA"  # Numerical Analysis (CS)
            ]


@dataclass
class DatabaseConfig:
    """Database configuration.

    ``connection_string`` defaults to empty; ConfigManager only warns (does
    not fail) when it is left unset. Values may be overridden via the
    DATABASE_URL, DATABASE_POOL_SIZE, and DATABASE_MAX_OVERFLOW
    environment variables.
    """
    connection_string: str = ""  # PostgreSQL URL or key=value DSN (see ConfigManager.validate_database_connection)
    pool_size: int = 5
    max_overflow: int = 10
    query_timeout: int = 30  # presumably seconds — TODO confirm with the database layer


@dataclass
class ScrapingConfig:
    """Scraping behavior configuration.

    ``rate_limit_delay``, ``max_retries``, ``request_timeout``, and
    ``batch_size`` can be overridden via the SCRAPING_* environment
    variables; ConfigManager rejects a negative ``rate_limit_delay``.
    """
    rate_limit_delay: float = 3.0  # delay between requests (presumably seconds — confirm with scraper)
    max_retries: int = 5
    request_timeout: int = 30  # presumably seconds — confirm with HTTP client
    batch_size: int = 100  # independent of ProcessingConfig.batch_size
    max_workers: int = 4


@dataclass
class ProcessingConfig:
    """Paper processing configuration.

    NOTE(review): thresholds below look like filter/truncation limits for
    paper records — confirm exact semantics with the processing pipeline.
    """
    batch_size: int = 50  # independent of ScrapingConfig.batch_size
    enable_text_processing: bool = True
    min_abstract_length: int = 50
    max_title_length: int = 500


@dataclass
class StateConfig:
    """State management configuration.

    ConfigManager creates ``file_path``'s parent directory during
    validation; ``file_path`` can be overridden via STATE_FILE_PATH.
    """
    file_path: str = "./data/scraper_state.json"
    backup_enabled: bool = True
    backup_retention_days: int = 30


@dataclass
class LoggingConfig:
    """Logging configuration.

    ``level`` must be one of DEBUG/INFO/WARNING/ERROR/CRITICAL (enforced by
    ConfigManager validation). ``level`` and ``file_path`` can be
    overridden via LOG_LEVEL / LOG_FILE_PATH.
    """
    level: str = "INFO"
    format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"  # stdlib logging format string
    file_path: Optional[str] = None  # None: no log file configured
    max_file_size: int = 10485760  # 10MB
    backup_count: int = 5  # presumably rotated-file count — confirm a rotating handler consumes this


@dataclass
class MonitoringConfig:
    """Monitoring and metrics configuration.

    ``alert_thresholds`` defaults to None and is replaced in
    ``__post_init__`` with the standard thresholds, so both
    ``MonitoringConfig()`` and ``MonitoringConfig(alert_thresholds=None)``
    yield the same defaults.
    """
    enabled: bool = True  # overridable via MONITORING_ENABLED
    metrics_file: str = "./data/metrics.json"  # overridable via METRICS_FILE
    health_check_interval: int = 300  # presumably seconds — confirm with the monitoring loop
    # Annotated Optional[dict] (was plain `dict` with a None default, which
    # misstated the accepted type); None selects the default thresholds.
    alert_thresholds: Optional[dict] = None

    def __post_init__(self):
        # Fill in the default thresholds when none were supplied.
        if self.alert_thresholds is None:
            self.alert_thresholds = {
                "error_rate": 0.05,  # 5% error rate threshold
                "processing_time": 3600,  # 1 hour processing time threshold
                "memory_usage": 0.8  # 80% memory usage threshold
            }


@dataclass
class SchedulingConfig:
    """Scheduling configuration."""
    enabled: bool = False
    daily_run_time: str = "02:00"  # 2 AM UTC
    timezone: str = "UTC"
    max_run_duration: int = 7200  # 2 hours max run time


class ConfigManager:
    """
    Configuration manager with environment-specific settings and validation.
    
    Features:
    - YAML configuration file support
    - Environment variable override
    - Configuration validation
    - Default value management
    - Secrets management
    """
    
    def __init__(self, config_file_path: str = "config.yaml"):
        """Initialize configuration manager.
        
        Args:
            config_file_path: Path to configuration file
        """
        self.config_file_path = Path(config_file_path)
        self.logger = logging.getLogger(__name__)
        
        # Load configuration
        self._config = self._load_configuration()
        
        # Validate configuration
        self._validate_configuration()
        
        self.logger.info(f"Configuration loaded from {self.config_file_path}")
    
    def _load_configuration(self) -> Dict[str, Any]:
        """Load configuration from file and environment."""
        config = {}
        
        # Load from file if exists
        if self.config_file_path.exists():
            config = self._load_from_file()
        else:
            self.logger.warning(f"Configuration file not found: {self.config_file_path}")
            config = self._get_default_config()
        
        # Override with environment variables
        config = self._apply_environment_overrides(config)
        
        # Apply configuration objects
        config = self._create_config_objects(config)
        
        return config
    
    def _load_from_file(self) -> Dict[str, Any]:
        """Load configuration from YAML file."""
        try:
            with open(self.config_file_path, 'r', encoding='utf-8') as f:
                config = yaml.safe_load(f) or {}
            
            self.logger.info(f"Configuration loaded from file: {self.config_file_path}")
            return config
            
        except yaml.YAMLError as e:
            self.logger.error(f"Error parsing configuration file: {e}")
            raise ValueError(f"Invalid YAML configuration: {e}")
        except Exception as e:
            self.logger.error(f"Error loading configuration file: {e}")
            raise
    
    def _get_default_config(self) -> Dict[str, Any]:
        """Get default configuration."""
        return {
            'arxiv': asdict(ArxivConfig()),
            'database': asdict(DatabaseConfig()),
            'scraping': asdict(ScrapingConfig()),
            'processing': asdict(ProcessingConfig()),
            'state': asdict(StateConfig()),
            'logging': asdict(LoggingConfig()),
            'monitoring': asdict(MonitoringConfig()),
            'scheduling': asdict(SchedulingConfig())
        }
    
    def _apply_environment_overrides(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Apply environment variable overrides."""
        env_mappings = {
            # Database
            'DATABASE_URL': ['database', 'connection_string'],
            'DATABASE_POOL_SIZE': ['database', 'pool_size'],
            'DATABASE_MAX_OVERFLOW': ['database', 'max_overflow'],
            
            # ArXiv
            'ARXIV_OAI_URL': ['arxiv', 'oai_base_url'],
            'ARXIV_EARLIEST_DATE': ['arxiv', 'earliest_date'],
            
            # Scraping
            'SCRAPING_RATE_LIMIT': ['scraping', 'rate_limit_delay'],
            'SCRAPING_MAX_RETRIES': ['scraping', 'max_retries'],
            'SCRAPING_TIMEOUT': ['scraping', 'request_timeout'],
            'SCRAPING_BATCH_SIZE': ['scraping', 'batch_size'],
            
            # State
            'STATE_FILE_PATH': ['state', 'file_path'],
            
            # Logging
            'LOG_LEVEL': ['logging', 'level'],
            'LOG_FILE_PATH': ['logging', 'file_path'],
            
            # Monitoring
            'MONITORING_ENABLED': ['monitoring', 'enabled'],
            'METRICS_FILE': ['monitoring', 'metrics_file'],
            
            # Scheduling
            'SCHEDULING_ENABLED': ['scheduling', 'enabled'],
            'DAILY_RUN_TIME': ['scheduling', 'daily_run_time']
        }
        
        for env_var, config_path in env_mappings.items():
            value = os.getenv(env_var)
            if value is not None:
                # Navigate to nested config
                current = config
                for key in config_path[:-1]:
                    if key not in current:
                        current[key] = {}
                    current = current[key]
                
                # Set value with type conversion
                current[config_path[-1]] = self._convert_env_value(value)
        
        # Handle special cases
        if os.getenv('ARXIV_SUBJECTS'):
            subjects = os.getenv('ARXIV_SUBJECTS').split(',')
            config['arxiv']['subjects'] = [s.strip() for s in subjects]
        
        return config
    
    def _convert_env_value(self, value: str) -> Any:
        """Convert environment variable string to appropriate type."""
        # Boolean conversion
        if value.lower() in ('true', '1', 'yes', 'on'):
            return True
        elif value.lower() in ('false', '0', 'no', 'off'):
            return False
        
        # Numeric conversion
        try:
            if '.' in value:
                return float(value)
            else:
                return int(value)
        except ValueError:
            pass
        
        # Return as string
        return value
    
    def _create_config_objects(self, config: Dict[str, Any]) -> Dict[str, Any]:
        """Create configuration objects from dictionaries."""
        try:
            return {
                'arxiv': ArxivConfig(**config.get('arxiv', {})),
                'database': DatabaseConfig(**config.get('database', {})),
                'scraping': ScrapingConfig(**config.get('scraping', {})),
                'processing': ProcessingConfig(**config.get('processing', {})),
                'state': StateConfig(**config.get('state', {})),
                'logging': LoggingConfig(**config.get('logging', {})),
                'monitoring': MonitoringConfig(**config.get('monitoring', {})),
                'scheduling': SchedulingConfig(**config.get('scheduling', {}))
            }
        except TypeError as e:
            self.logger.error(f"Configuration validation error: {e}")
            raise ValueError(f"Invalid configuration: {e}")
    
    def _validate_configuration(self):
        """Validate configuration values."""
        # Validate database connection string
        if not self._config['database'].connection_string:
            self.logger.warning("Database connection string not configured")
        
        # Validate ArXiv URL
        if not self._config['arxiv'].oai_base_url.startswith('http'):
            raise ValueError("Invalid ArXiv OAI URL")
        
        # Validate rate limiting
        if self._config['scraping'].rate_limit_delay < 0:
            raise ValueError("Rate limit delay must be non-negative")
        
        # Validate file paths
        state_path = Path(self._config['state'].file_path)
        state_path.parent.mkdir(parents=True, exist_ok=True)
        
        # Validate log level
        valid_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if self._config['logging'].level not in valid_levels:
            raise ValueError(f"Invalid log level: {self._config['logging'].level}")
    
    def get_config(self) -> Dict[str, Any]:
        """Get complete configuration as dictionary."""
        return {
            'arxiv': asdict(self._config['arxiv']),
            'database': asdict(self._config['database']),
            'scraping': asdict(self._config['scraping']),
            'processing': asdict(self._config['processing']),
            'state': asdict(self._config['state']),
            'logging': asdict(self._config['logging']),
            'monitoring': asdict(self._config['monitoring']),
            'scheduling': asdict(self._config['scheduling'])
        }
    
    def get_arxiv_config(self) -> ArxivConfig:
        """Get ArXiv configuration."""
        return self._config['arxiv']
    
    def get_database_config(self) -> DatabaseConfig:
        """Get database configuration."""
        return self._config['database']
    
    def get_scraping_config(self) -> ScrapingConfig:
        """Get scraping configuration."""
        return self._config['scraping']
    
    def get_processing_config(self) -> ProcessingConfig:
        """Get processing configuration."""
        return self._config['processing']
    
    def get_state_config(self) -> StateConfig:
        """Get state configuration."""
        return self._config['state']
    
    def get_logging_config(self) -> LoggingConfig:
        """Get logging configuration."""
        return self._config['logging']
    
    def get_monitoring_config(self) -> MonitoringConfig:
        """Get monitoring configuration."""
        return self._config['monitoring']
    
    def get_scheduling_config(self) -> SchedulingConfig:
        """Get scheduling configuration."""
        return self._config['scheduling']
    
    def save_config_template(self, output_path: str = "config.template.yaml"):
        """Save configuration template file."""
        template_config = self._get_default_config()
        
        try:
            with open(output_path, 'w', encoding='utf-8') as f:
                yaml.dump(template_config, f, default_flow_style=False, 
                         allow_unicode=True, indent=2)
            
            self.logger.info(f"Configuration template saved to: {output_path}")
            
        except Exception as e:
            self.logger.error(f"Error saving configuration template: {e}")
            raise
    
    def validate_database_connection(self) -> bool:
        """Validate database connection string format."""
        conn_str = self._config['database'].connection_string
        
        if not conn_str:
            return False
        
        # Basic validation of PostgreSQL connection string
        required_parts = ['postgresql://', 'host=', 'dbname=']
        return any(part in conn_str for part in required_parts)
    
    def get_sensitive_config_mask(self) -> Dict[str, Any]:
        """Get configuration with sensitive values masked."""
        config = self.get_config()
        
        # Mask database connection string
        if config['database']['connection_string']:
            config['database']['connection_string'] = '***MASKED***'
        
        return config
    
    def reload_configuration(self):
        """Reload configuration from file."""
        try:
            self._config = self._load_configuration()
            self._validate_configuration()
            self.logger.info("Configuration reloaded successfully")
        except Exception as e:
            self.logger.error(f"Error reloading configuration: {e}")
            raise