"""
Configuration management system for CodeMCP.

Handles loading and validation of configuration from TOML files,
environment variables, and command-line arguments.
"""

import os
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

import toml

from .error_handler import ConfigurationError, log_info, log_warning


@dataclass
class ServerConfig:
    """Server configuration settings.

    Populated from the ``[server]`` table of a TOML config file; ``port``,
    ``host``, and ``workers`` may also be overridden via the
    ``CODEMCP_SERVER_*`` environment variables (see Config.merge_env_vars).
    """
    # TCP port to listen on; Config.validate() requires 1-65535.
    port: int = 8889
    # Hostname or interface to bind.
    host: str = "localhost"
    # Number of worker processes/threads; Config.validate() requires >= 1.
    workers: int = 4
    # Depth limit for tree traversal; Config.validate() requires >= 1.
    max_tree_depth: int = 50
    # Lifetime of cached results, in hours.
    cache_ttl_hours: int = 24
    # General server operation timeout, in seconds.
    timeout_seconds: int = 300


@dataclass
class LanguageConfig:
    """Language analysis configuration.

    Per-language enable flags plus the language standard/version each
    analyzer targets. Populated from the ``[languages]`` table of a
    TOML config file.
    """
    # Enable/disable analysis per supported language.
    python_enabled: bool = True
    cpp_enabled: bool = True
    fortran_enabled: bool = True
    typescript_enabled: bool = True
    
    # Language-specific settings: the dialect/standard each analyzer assumes.
    python_version: str = "3.8+"
    cpp_standard: str = "c++17"
    fortran_standard: str = "f2008"


@dataclass
class DocumentConfig:
    """Document analysis configuration.

    Populated from the ``[documents]`` table of a TOML config file.
    """
    # Enable/disable analysis per supported document format.
    latex_enabled: bool = True
    markdown_enabled: bool = True
    
    # Document-specific settings.
    # Maximum nesting depth of sections to analyze.
    max_section_depth: int = 10
    # Whether embedded code blocks are extracted during document analysis.
    extract_code_blocks: bool = True


@dataclass
class LLMConfig:
    """LLM integration configuration.

    Populated from the ``[llm]`` table of a TOML config file; ``provider``,
    ``model``, ``api_key``, and ``api_base`` may also be overridden via the
    ``CODEMCP_LLM_*`` environment variables (see Config.merge_env_vars).
    """
    # Backend provider; Config.validate() accepts "openai", "anthropic", or "local".
    provider: str = "openai"  # openai, anthropic, local
    # Model identifier passed to the provider.
    model: str = "gpt-4"
    # API credentials; None means "not configured" (e.g. for a local provider).
    api_key: Optional[str] = None
    # Optional override for the provider's API base URL.
    api_base: Optional[str] = None
    # Number of requests batched together; Config.validate() requires >= 1.
    batch_size: int = 10
    # Per-request token limit; Config.validate() requires >= 1.
    max_tokens: int = 4096
    # Sampling temperature; Config.validate() requires 0.0-2.0.
    temperature: float = 0.1
    # Per-request timeout, in seconds.
    timeout_seconds: int = 30
    # Retry attempts on request failure.
    max_retries: int = 3


@dataclass
class ManualModeConfig:
    """Manual exploration mode configuration.

    Populated from the ``[manual_mode]`` table of a TOML config file.
    """
    # Master switch for the interactive exploration mode.
    enabled: bool = True
    # Session timeout, in seconds.
    timeout_seconds: int = 300
    # Maximum number of selections per session.
    max_selections: int = 10
    # Display options for the interactive view.
    syntax_highlighting: bool = True
    show_complexity_scores: bool = True


@dataclass
class CacheConfig:
    """Caching configuration.

    Populated from the ``[cache]`` table of a TOML config file; ``type`` and
    ``redis_url`` may also be overridden via ``CODEMCP_CACHE_*`` environment
    variables (see Config.merge_env_vars).
    """
    # Master switch for caching.
    enabled: bool = True
    # Backend; Config.validate() accepts "memory", "redis", or "file".
    type: str = "memory"  # memory, redis, file
    # Connection URL; required by Config.validate() when type == "redis".
    redis_url: Optional[str] = None
    # Directory for the file backend; None means unset.
    file_cache_dir: Optional[str] = None
    # Memory budget for the in-memory backend; Config.validate() requires >= 1.
    max_memory_mb: int = 1024
    # Entry time-to-live, in seconds.
    ttl_seconds: int = 3600


@dataclass
class LoggingConfig:
    """Logging configuration.

    Populated from the ``[logging]`` table of a TOML config file; ``level``
    may also be overridden via ``CODEMCP_LOG_LEVEL``
    (see Config.merge_env_vars).
    """
    # Log level name; Config.validate() accepts the standard five
    # (DEBUG/INFO/WARNING/ERROR/CRITICAL), case-insensitively.
    level: str = "INFO"
    # Format string in logging-module percent style.
    format: str = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    # Log file destination; None presumably means console-only — confirm
    # against where this config is consumed.
    file_path: Optional[str] = None
    # Rotation settings: max size per file and number of rotated backups.
    max_file_size_mb: int = 100
    backup_count: int = 5


@dataclass
class Config:
    """Main configuration class.

    Aggregates all section configs plus top-level flags. Instances can be
    built from TOML files (:meth:`from_file`) or plain dicts
    (:meth:`from_dict`), layered with environment variables
    (:meth:`merge_env_vars`), checked with :meth:`validate`, and persisted
    with :meth:`save_to_file`.
    """
    server: ServerConfig = field(default_factory=ServerConfig)
    languages: LanguageConfig = field(default_factory=LanguageConfig)
    documents: DocumentConfig = field(default_factory=DocumentConfig)
    llm: LLMConfig = field(default_factory=LLMConfig)
    manual_mode: ManualModeConfig = field(default_factory=ManualModeConfig)
    cache: CacheConfig = field(default_factory=CacheConfig)
    logging: LoggingConfig = field(default_factory=LoggingConfig)
    
    # Additional settings
    debug: bool = False
    project_root: Optional[str] = None
    # Unrecognized top-level keys from the input are preserved here so they
    # round-trip through to_dict()/save_to_file().
    custom_settings: Dict[str, Any] = field(default_factory=dict)
    
    @staticmethod
    def _section_names() -> tuple:
        """Names of the nested section configs (one per TOML table).

        Single source of truth shared by from_dict() and to_dict() so the
        section list cannot drift between the two.
        """
        return ("server", "languages", "documents", "llm",
                "manual_mode", "cache", "logging")
    
    @classmethod
    def from_file(cls, config_path: Union[str, Path]) -> "Config":
        """Load configuration from a TOML file.

        Args:
            config_path: Path to the TOML configuration file.

        Returns:
            A populated Config instance.

        Raises:
            ConfigurationError: If the file is missing, unparseable, or
                otherwise cannot be loaded.
        """
        config_path = Path(config_path)
        
        if not config_path.exists():
            raise ConfigurationError(f"Configuration file not found: {config_path}")
        
        try:
            config_data = toml.load(config_path)
            log_info(f"Loaded configuration from {config_path}")
            return cls.from_dict(config_data)
        except Exception as e:
            # Chain the original exception so the root cause stays visible
            # in tracebacks instead of being flattened into a message.
            raise ConfigurationError(f"Failed to load configuration: {e}",
                                     config_key=str(config_path)) from e
    
    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "Config":
        """Create configuration from a dictionary.

        Known section tables update the matching sub-config, known top-level
        keys are set directly, and anything else is preserved verbatim in
        ``custom_settings``.

        Args:
            data: Parsed configuration data (e.g. from toml.load).

        Returns:
            A populated Config instance.
        """
        config = cls()
        sections = cls._section_names()
        
        # Apply each recognized section table onto its sub-config.
        for section in sections:
            if section in data:
                updated = cls._update_dataclass(getattr(config, section), data[section])
                setattr(config, section, updated)
        
        # Apply recognized top-level settings.
        top_level_keys = ("debug", "project_root")
        for key in top_level_keys:
            if key in data:
                setattr(config, key, data[key])
        
        # Everything unrecognized is kept so user extensions survive a
        # load/save round-trip.
        known = set(sections) | set(top_level_keys)
        config.custom_settings = {k: v for k, v in data.items() if k not in known}
        
        return config
    
    @staticmethod
    def _update_dataclass(instance, data: Dict[str, Any]):
        """Update a dataclass instance in place from dictionary data.

        Unknown keys are logged as warnings and skipped rather than raised,
        so a newer config file does not break an older build.

        Returns:
            The same (mutated) instance, for assignment convenience.
        """
        for key, value in data.items():
            if hasattr(instance, key):
                setattr(instance, key, value)
            else:
                log_warning(f"Unknown configuration key: {key}")
        return instance
    
    def merge_env_vars(self, prefix: str = "CODEMCP_"):
        """Merge environment variables into the configuration.

        Recognized variables (e.g. ``CODEMCP_SERVER_PORT``) override the
        corresponding setting. Conversion failures are logged and skipped,
        so a malformed environment value cannot abort startup.

        Args:
            prefix: Prefix shared by all recognized environment variables.
        """
        # Maps env var -> (section, key, converter); key is None for
        # attributes that live directly on Config (e.g. debug).
        env_mappings = {
            f"{prefix}SERVER_PORT": ("server", "port", int),
            f"{prefix}SERVER_HOST": ("server", "host", str),
            f"{prefix}SERVER_WORKERS": ("server", "workers", int),
            f"{prefix}LLM_PROVIDER": ("llm", "provider", str),
            f"{prefix}LLM_MODEL": ("llm", "model", str),
            f"{prefix}LLM_API_KEY": ("llm", "api_key", str),
            f"{prefix}LLM_API_BASE": ("llm", "api_base", str),
            f"{prefix}DEBUG": ("debug", None, lambda x: x.lower() in ["true", "1", "yes"]),
            f"{prefix}LOG_LEVEL": ("logging", "level", str),
            f"{prefix}CACHE_TYPE": ("cache", "type", str),
            f"{prefix}CACHE_REDIS_URL": ("cache", "redis_url", str),
        }
        
        for env_var, (section, key, converter) in env_mappings.items():
            value = os.getenv(env_var)
            if value is None:
                continue
            try:
                converted_value = converter(value) if converter else value
                if key is None:
                    # Top-level setting on Config itself.
                    setattr(self, section, converted_value)
                else:
                    # Nested setting on a section sub-config.
                    setattr(getattr(self, section), key, converted_value)
                log_info(f"Applied environment variable {env_var}")
            except Exception as e:
                # Best-effort by design: log and continue.
                log_warning(f"Failed to apply environment variable {env_var}: {e}")
    
    def validate(self):
        """Validate configuration settings.

        Collects all problems before failing so the user sees every error
        at once instead of fixing them one at a time.

        Raises:
            ConfigurationError: If any setting is out of range or invalid,
                with all failures joined into one message.
        """
        errors = []
        
        # Validate server settings
        if not (1 <= self.server.port <= 65535):
            errors.append("Server port must be between 1 and 65535")
        
        if self.server.workers < 1:
            errors.append("Server workers must be at least 1")
        
        if self.server.max_tree_depth < 1:
            errors.append("Max tree depth must be at least 1")
        
        # Validate LLM settings
        if self.llm.provider not in ["openai", "anthropic", "local"]:
            errors.append("LLM provider must be 'openai', 'anthropic', or 'local'")
        
        if self.llm.batch_size < 1:
            errors.append("LLM batch size must be at least 1")
        
        if self.llm.max_tokens < 1:
            errors.append("LLM max tokens must be at least 1")
        
        if not (0.0 <= self.llm.temperature <= 2.0):
            errors.append("LLM temperature must be between 0.0 and 2.0")
        
        # Validate cache settings
        if self.cache.type not in ["memory", "redis", "file"]:
            errors.append("Cache type must be 'memory', 'redis', or 'file'")
        
        if self.cache.type == "redis" and not self.cache.redis_url:
            errors.append("Redis URL required when cache type is 'redis'")
        
        if self.cache.max_memory_mb < 1:
            errors.append("Cache max memory must be at least 1 MB")
        
        # Validate logging settings (case-insensitive on level name)
        valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
        if self.logging.level.upper() not in valid_log_levels:
            errors.append(f"Log level must be one of: {', '.join(valid_log_levels)}")
        
        if errors:
            raise ConfigurationError(f"Configuration validation failed: {'; '.join(errors)}")
        
        log_info("Configuration validation passed")
    
    def to_dict(self) -> Dict[str, Any]:
        """Convert configuration to a plain dictionary.

        Returns:
            A dict with one sub-dict per section (all dataclass fields,
            via dataclasses.asdict), the top-level settings, and any
            custom settings merged in at the top level — matching the
            shape from_dict() accepts.
        """
        result: Dict[str, Any] = {
            section: asdict(getattr(self, section))
            for section in self._section_names()
        }
        result["debug"] = self.debug
        result["project_root"] = self.project_root
        result.update(self.custom_settings)
        return result
    
    @staticmethod
    def _drop_none(data: Dict[str, Any]) -> Dict[str, Any]:
        """Recursively remove None values from a nested dict.

        TOML has no null type, so None-valued keys (e.g. an unset
        llm.api_key) must be omitted before serialization; on reload,
        from_dict() leaves the corresponding fields at their defaults.
        """
        return {
            k: Config._drop_none(v) if isinstance(v, dict) else v
            for k, v in data.items()
            if v is not None
        }
    
    def save_to_file(self, config_path: Union[str, Path]):
        """Save configuration to a TOML file.

        None-valued settings are omitted from the written file because
        TOML cannot represent null (see _drop_none).

        Args:
            config_path: Destination path; parent directories are created.

        Raises:
            ConfigurationError: If the file cannot be written.
        """
        config_path = Path(config_path)
        config_path.parent.mkdir(parents=True, exist_ok=True)
        
        try:
            with open(config_path, 'w') as f:
                toml.dump(self._drop_none(self.to_dict()), f)
            log_info(f"Configuration saved to {config_path}")
        except Exception as e:
            # Chain the cause so I/O errors remain diagnosable.
            raise ConfigurationError(f"Failed to save configuration: {e}",
                                     config_key=str(config_path)) from e


def load_config(config_path: Optional[Union[str, Path]] = None,
                merge_env: bool = True,
                validate: bool = True) -> Config:
    """Build a Config, layering file values, environment, and validation.

    Args:
        config_path: Path to a TOML configuration file; when omitted,
            defaults are used.
        merge_env: Whether to apply CODEMCP_* environment overrides.
        validate: Whether to run Config.validate() before returning.

    Returns:
        The fully assembled Config instance.
    """
    config = Config.from_file(config_path) if config_path else Config()

    if merge_env:
        config.merge_env_vars()

    if validate:
        config.validate()

    return config