"""
Configuration management for HDC operations.

This module provides configuration classes and utilities for managing
HDC system parameters, performance settings, and operational constraints.
"""

import json
import logging
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

import toml
import yaml

logger = logging.getLogger(__name__)


@dataclass
class HDCConfig:
    """
    Configuration class for HDC operations and system parameters.

    This class encapsulates all configurable parameters for the HDC system,
    including vector dimensions, operation parameters, and performance settings.
    All parameters are validated in ``__post_init__``, so invalid values raise
    ``ValueError`` at construction time (and therefore also from ``from_dict``
    and ``update``, which build new instances).
    """
    
    # Vector Configuration
    vector_dimension: int = 10000
    min_dimension: int = 5000
    max_dimension: int = 15000
    
    # HDC Operation Parameters
    bundling_threshold: float = 0.5
    cleanup_threshold: float = 0.7
    max_similarity_orthogonal: float = 0.1
    permutation_seed_base: int = 42
    
    # Random Vector Generation
    default_distribution: str = 'bipolar'  # 'bipolar', 'gaussian', 'uniform'
    random_seed: Optional[int] = None  # None means non-deterministic generation
    
    # Performance Settings
    use_numba_acceleration: bool = True
    fft_backend: str = 'scipy'  # 'scipy', 'numpy'
    parallel_operations: bool = True
    max_workers: Optional[int] = None  # None lets the executor choose
    
    # Memory Management
    vector_cache_size: int = 1000
    enable_memory_mapping: bool = False
    memory_limit_mb: int = 4096
    
    # Accuracy and Quality Settings
    trait_reconstruction_accuracy: float = 0.95
    noise_robustness_level: float = 0.8
    orthogonality_validation: bool = True
    
    # Logging and Debugging
    log_level: str = 'INFO'
    enable_performance_profiling: bool = False
    validate_operations: bool = True
    
    # System Constraints
    max_bundling_vectors: int = 100
    max_cleanup_iterations: int = 10
    operation_timeout_seconds: float = 30.0
    
    def __post_init__(self):
        """Validate configuration after initialization."""
        self._validate_config()
    
    def _validate_config(self):
        """Validate configuration parameters.
        
        Raises:
            ValueError: If any parameter is outside its allowed range.
        """
        # Vector dimension must lie within the configured bounds
        if not (self.min_dimension <= self.vector_dimension <= self.max_dimension):
            raise ValueError(
                f"Vector dimension {self.vector_dimension} must be between "
                f"{self.min_dimension} and {self.max_dimension}"
            )
        
        # Thresholds are similarity values and must lie in the unit interval
        if not (0.0 <= self.bundling_threshold <= 1.0):
            raise ValueError("bundling_threshold must be between 0.0 and 1.0")
        
        if not (0.0 <= self.cleanup_threshold <= 1.0):
            raise ValueError("cleanup_threshold must be between 0.0 and 1.0")
        
        if not (0.0 <= self.max_similarity_orthogonal <= 1.0):
            raise ValueError("max_similarity_orthogonal must be between 0.0 and 1.0")
        
        # Accuracy/quality requirements are fractions in the unit interval
        if not (0.0 <= self.trait_reconstruction_accuracy <= 1.0):
            raise ValueError("trait_reconstruction_accuracy must be between 0.0 and 1.0")
        
        if not (0.0 <= self.noise_robustness_level <= 1.0):
            raise ValueError("noise_robustness_level must be between 0.0 and 1.0")
        
        # Low accuracy is allowed but discouraged, so warn instead of raising
        if self.trait_reconstruction_accuracy < 0.95:
            logger.warning(
                f"trait_reconstruction_accuracy {self.trait_reconstruction_accuracy} "
                "is below recommended minimum of 0.95"
            )
        
        # Validate distribution type
        valid_distributions = ['bipolar', 'gaussian', 'uniform']
        if self.default_distribution not in valid_distributions:
            raise ValueError(
                f"default_distribution must be one of {valid_distributions}"
            )
        
        # Validate FFT backend
        valid_backends = ['scipy', 'numpy']
        if self.fft_backend not in valid_backends:
            raise ValueError(f"fft_backend must be one of {valid_backends}")
        
        # Validate log level
        valid_log_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if self.log_level not in valid_log_levels:
            raise ValueError(f"log_level must be one of {valid_log_levels}")
        
        # Validate memory settings
        if self.memory_limit_mb <= 0:
            raise ValueError("memory_limit_mb must be positive")
        
        if self.vector_cache_size <= 0:
            raise ValueError("vector_cache_size must be positive")
        
        # Validate operation limits
        if self.max_bundling_vectors <= 0:
            raise ValueError("max_bundling_vectors must be positive")
        
        if self.max_cleanup_iterations <= 0:
            raise ValueError("max_cleanup_iterations must be positive")
        
        if self.operation_timeout_seconds <= 0:
            raise ValueError("operation_timeout_seconds must be positive")
    
    def to_dict(self) -> Dict[str, Any]:
        """Convert configuration to a plain dictionary of field values."""
        return asdict(self)
    
    @classmethod
    def from_dict(cls, config_dict: Dict[str, Any]) -> 'HDCConfig':
        """Create a validated configuration from a dictionary of field values."""
        return cls(**config_dict)
    
    def update(self, **kwargs) -> 'HDCConfig':
        """Return a new, re-validated configuration with *kwargs* overrides.
        
        The original instance is left unchanged.
        """
        config_dict = self.to_dict()
        config_dict.update(kwargs)
        return self.from_dict(config_dict)
    
    def get_performance_config(self) -> Dict[str, Any]:
        """Get performance-related configuration."""
        return {
            'use_numba_acceleration': self.use_numba_acceleration,
            'fft_backend': self.fft_backend,
            'parallel_operations': self.parallel_operations,
            'max_workers': self.max_workers,
            'vector_cache_size': self.vector_cache_size,
            'memory_limit_mb': self.memory_limit_mb,
        }
    
    def get_operation_config(self) -> Dict[str, Any]:
        """Get operation-related configuration."""
        return {
            'vector_dimension': self.vector_dimension,
            'bundling_threshold': self.bundling_threshold,
            'cleanup_threshold': self.cleanup_threshold,
            'max_similarity_orthogonal': self.max_similarity_orthogonal,
            'default_distribution': self.default_distribution,
            'trait_reconstruction_accuracy': self.trait_reconstruction_accuracy,
        }
    
    def get_quality_config(self) -> Dict[str, Any]:
        """Get quality and accuracy configuration."""
        return {
            'trait_reconstruction_accuracy': self.trait_reconstruction_accuracy,
            'noise_robustness_level': self.noise_robustness_level,
            'orthogonality_validation': self.orthogonality_validation,
            'validate_operations': self.validate_operations,
        }


def load_config(config_path: Union[str, Path], 
                format_type: Optional[str] = None) -> HDCConfig:
    """
    Load configuration from file.
    
    Args:
        config_path: Path to configuration file
        format_type: File format ('yaml', 'json', 'toml'). Auto-detected if None.
        
    Returns:
        HDCConfig instance. An empty file yields a default configuration.
        
    Raises:
        ValueError: If file format is unsupported or the file content
            is not a mapping
        FileNotFoundError: If configuration file doesn't exist
    """
    config_path = Path(config_path)
    
    if not config_path.exists():
        raise FileNotFoundError(f"Configuration file not found: {config_path}")
    
    # Auto-detect format from the file extension if not specified
    if format_type is None:
        format_type = config_path.suffix.lower().lstrip('.')
    
    try:
        with open(config_path, 'r', encoding='utf-8') as f:
            if format_type in ['yaml', 'yml']:
                config_dict = yaml.safe_load(f)
            elif format_type == 'json':
                config_dict = json.load(f)
            elif format_type == 'toml':
                config_dict = toml.load(f)
            else:
                raise ValueError(f"Unsupported configuration format: {format_type}")
        
        # yaml.safe_load returns None for an empty document; fall back to
        # defaults instead of crashing in HDCConfig.from_dict(**None).
        if config_dict is None:
            config_dict = {}
        if not isinstance(config_dict, dict):
            raise ValueError(
                f"Configuration file must contain a mapping, "
                f"got {type(config_dict).__name__}: {config_path}"
            )
        
        logger.info(f"Loaded configuration from {config_path}")
        return HDCConfig.from_dict(config_dict)
    
    except Exception as e:
        logger.error(f"Failed to load configuration from {config_path}: {e}")
        raise


def save_config(config: HDCConfig, 
                config_path: Union[str, Path],
                format_type: Optional[str] = None) -> None:
    """
    Serialize an HDCConfig to a file on disk.
    
    Args:
        config: HDCConfig instance to save
        config_path: Path where to save configuration
        format_type: File format ('yaml', 'json', 'toml'). Auto-detected if None.
    """
    config_path = Path(config_path)
    
    # Fall back to the file extension when no explicit format is given
    if format_type is None:
        format_type = config_path.suffix.lower().lstrip('.')
    
    # Ensure the target directory exists before opening the file
    config_path.parent.mkdir(parents=True, exist_ok=True)
    
    payload = config.to_dict()
    
    try:
        with open(config_path, 'w', encoding='utf-8') as handle:
            if format_type == 'json':
                json.dump(payload, handle, indent=2, ensure_ascii=False)
            elif format_type in ('yaml', 'yml'):
                yaml.safe_dump(payload, handle, default_flow_style=False, indent=2)
            elif format_type == 'toml':
                toml.dump(payload, handle)
            else:
                raise ValueError(f"Unsupported configuration format: {format_type}")
        
        logger.info(f"Saved configuration to {config_path}")
    
    except Exception as exc:
        logger.error(f"Failed to save configuration to {config_path}: {exc}")
        raise


def get_default_config() -> HDCConfig:
    """Return a fresh HDCConfig populated entirely with default values."""
    return HDCConfig()


def create_config_template(output_path: Union[str, Path], 
                          format_type: str = 'yaml') -> None:
    """
    Write a template configuration file containing all default values.
    
    Args:
        output_path: Path where to save the template
        format_type: Format for the template file
    """
    save_config(get_default_config(), output_path, format_type)
    logger.info(f"Created configuration template at {output_path}")


class ConfigManager:
    """
    Manager class for handling multiple configurations and environments.
    
    Holds a base configuration plus a registry of named environment
    configurations that can be derived from the base or loaded from files.
    """
    
    def __init__(self, base_config: Optional[HDCConfig] = None):
        """Initialize config manager with base configuration (default if None)."""
        self.base_config = base_config or get_default_config()
        # Registry of named environment configurations
        self.environments: Dict[str, HDCConfig] = {}
    
    def add_environment(self, name: str, config: HDCConfig) -> None:
        """Add named configuration environment, replacing any existing entry."""
        self.environments[name] = config
        logger.info(f"Added configuration environment: {name}")
    
    def get_environment(self, name: str) -> HDCConfig:
        """Get configuration for named environment.
        
        Raises:
            KeyError: If no environment with that name is registered.
        """
        if name not in self.environments:
            raise KeyError(f"Configuration environment not found: {name}")
        return self.environments[name]
    
    def create_environment(self, name: str, **kwargs) -> HDCConfig:
        """Create and register a new environment by updating the base config."""
        env_config = self.base_config.update(**kwargs)
        self.add_environment(name, env_config)
        return env_config
    
    def list_environments(self) -> List[str]:
        """List available configuration environment names."""
        return list(self.environments.keys())
    
    def load_environments_from_directory(self, directory: Union[str, Path]) -> None:
        """Load all configuration files from directory as environments.
        
        Each file's stem becomes the environment name. All formats supported
        by load_config() are picked up (YAML, JSON, TOML).
        
        Raises:
            FileNotFoundError: If *directory* does not exist.
        """
        directory = Path(directory)
        
        if not directory.exists():
            raise FileNotFoundError(f"Directory not found: {directory}")
        
        loaded = 0
        # Sorted for a deterministic load order (last wins on name collisions)
        for pattern in ("*.yaml", "*.yml", "*.json", "*.toml"):
            for config_file in sorted(directory.glob(pattern)):
                self.add_environment(config_file.stem, load_config(config_file))
                loaded += 1
        
        # Report only what this call loaded, not the total registry size
        logger.info(f"Loaded {loaded} environments from {directory}")


# Process-wide configuration instance; created lazily by get_global_config()
_global_config: Optional[HDCConfig] = None


def get_global_config() -> HDCConfig:
    """Return the global configuration, creating the default on first use."""
    global _global_config
    if _global_config is not None:
        return _global_config
    _global_config = get_default_config()
    return _global_config


def set_global_config(config: HDCConfig) -> None:
    """Install *config* as the process-wide global configuration."""
    global _global_config
    _global_config = config
    logger.info("Updated global HDC configuration")


def reset_global_config() -> None:
    """Discard any custom global configuration and restore the default."""
    global _global_config
    _global_config = get_default_config()
    logger.info("Reset global HDC configuration to default")


@dataclass
class Config:
    """
    Main platform configuration class.
    
    This class provides configuration for the complete AI Historical
    Simulation Platform: platform metadata, the nested HDC configuration,
    database, performance, historical-figure, LLM, and web-interface
    settings. Parameters are validated on construction; invalid values
    raise ValueError.
    """
    
    # Platform settings
    platform_name: str = "AI Historical Simulation Platform"
    version: str = "1.0.0"
    debug: bool = False
    log_level: str = "INFO"
    
    # HDC Configuration (validated independently by HDCConfig itself)
    hdc: HDCConfig = field(default_factory=HDCConfig)
    
    # Database settings
    database_url: str = "sqlite:///historical_figures.db"
    data_directory: str = "data"
    
    # Performance settings
    max_concurrent_sessions: int = 1000
    response_timeout_seconds: float = 30.0
    cache_size: int = 1000
    
    # Historical figure settings
    max_historical_figures: int = 100
    default_historical_figures: List[str] = field(default_factory=lambda: [
        "Napoleon Bonaparte",
        "William Shakespeare", 
        "Albert Einstein",
        "Leonardo da Vinci",
        "Cleopatra",
        "Marie Curie",
        "Winston Churchill"
    ])
    
    # LLM settings
    llm_model: str = "gpt-3.5-turbo"
    llm_api_key: Optional[str] = None  # None means key must come from elsewhere
    llm_temperature: float = 0.7
    llm_max_tokens: int = 2000
    
    # Web interface settings
    web_host: str = "localhost"
    web_port: int = 8080
    web_debug: bool = False
    
    def __post_init__(self):
        """Validate configuration after initialization."""
        self._validate_config()
    
    def _validate_config(self):
        """Validate configuration parameters.
        
        Raises:
            ValueError: If any parameter is outside its allowed range.
        """
        # Mirror HDCConfig's log-level validation for consistency
        valid_log_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
        if self.log_level not in valid_log_levels:
            raise ValueError(f"log_level must be one of {valid_log_levels}")
        
        if self.max_concurrent_sessions <= 0:
            raise ValueError("max_concurrent_sessions must be positive")
        
        if self.response_timeout_seconds <= 0:
            raise ValueError("response_timeout_seconds must be positive")
        
        if self.cache_size <= 0:
            raise ValueError("cache_size must be positive")
        
        if self.max_historical_figures <= 0:
            raise ValueError("max_historical_figures must be positive")
        
        if self.web_port <= 0 or self.web_port > 65535:
            raise ValueError("web_port must be between 1 and 65535")
        
        if self.llm_temperature < 0 or self.llm_temperature > 2:
            raise ValueError("llm_temperature must be between 0 and 2")
        
        if self.llm_max_tokens <= 0:
            raise ValueError("llm_max_tokens must be positive")
    
    def to_dict(self) -> Dict[str, Any]:
        """Convert configuration (including nested hdc) to a dictionary."""
        return asdict(self)
    
    @classmethod
    def from_dict(cls, config_dict: Dict[str, Any]) -> 'Config':
        """Create configuration from dictionary.
        
        A nested 'hdc' dict is converted to an HDCConfig. The input dict
        is copied first so the caller's mapping is never mutated.
        """
        config_dict = dict(config_dict)
        if 'hdc' in config_dict and isinstance(config_dict['hdc'], dict):
            config_dict['hdc'] = HDCConfig.from_dict(config_dict['hdc'])
        return cls(**config_dict)
    
    def get_database_config(self) -> Dict[str, Any]:
        """Get database-related configuration."""
        return {
            'database_url': self.database_url,
            'data_directory': self.data_directory
        }
    
    def get_performance_config(self) -> Dict[str, Any]:
        """Get performance-related configuration."""
        return {
            'max_concurrent_sessions': self.max_concurrent_sessions,
            'response_timeout_seconds': self.response_timeout_seconds,
            'cache_size': self.cache_size,
            'max_historical_figures': self.max_historical_figures
        }
    
    def get_llm_config(self) -> Dict[str, Any]:
        """Get LLM-related configuration."""
        return {
            'model': self.llm_model,
            'api_key': self.llm_api_key,
            'temperature': self.llm_temperature,
            'max_tokens': self.llm_max_tokens
        }
    
    def get_web_config(self) -> Dict[str, Any]:
        """Get web interface configuration."""
        return {
            'host': self.web_host,
            'port': self.web_port,
            'debug': self.web_debug
        }


def get_default_platform_config() -> Config:
    """Return a fresh platform Config populated with default values."""
    return Config()