use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::env;
use std::path::{Path, PathBuf};

/// Top-level application configuration, composed of one section per concern.
///
/// Construct with [`AppConfig::default`] or [`AppConfig::load`] (which layers
/// environment-variable overrides on top of the defaults and validates).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    /// Database settings (file locations, backup policy)
    pub database: DatabaseConfig,
    /// Embedding settings (provider, model, vector cache)
    pub embeddings: EmbeddingConfig,
    /// Image generation settings (provider, model, formats)
    pub images: ImageConfig,
    /// API provider settings (credentials, timeouts, rate limits)
    pub providers: ProviderConfig,
    /// Performance settings (pooling, response caching)
    pub performance: PerformanceConfig,
    /// Security settings (validation, rate limiting, filtering)
    pub security: SecurityConfig,
    /// Logging settings (level, destinations, metrics)
    pub logging: LoggingConfig,
}

/// Database configuration: where the JSON-backed stores live and how often
/// they are backed up.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    /// Path to agents database file (default: `agents.json`)
    pub agents_file: PathBuf,
    /// Path to tools database file (default: `tools.json`)
    pub tools_file: PathBuf,
    /// Whether automatic backups are enabled
    pub auto_backup: bool,
    /// Interval between automatic backups, in seconds
    pub backup_interval_seconds: u64,
}

/// Embedding configuration: which provider/model to use and how results are
/// cached in memory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmbeddingConfig {
    /// Default provider for embeddings (e.g. `"openai"`)
    pub default_provider: String,
    /// Default model for embeddings (e.g. `"text-embedding-ada-002"`)
    pub default_model: String,
    /// Dimensionality of embedding vectors (1536 for ada-002 by default)
    pub vector_dimension: usize,
    /// Whether embeddings are cached in memory
    pub cache_embeddings: bool,
    /// Maximum number of entries held in the embedding cache
    pub max_cache_size: usize,
}

/// Image generation configuration: provider/model selection plus size and
/// format constraints.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageConfig {
    /// Default provider for image generation (e.g. `"openai"`)
    pub default_provider: String,
    /// Default model for image generation (e.g. `"dall-e-3"`)
    pub default_model: String,
    /// Maximum image dimension in pixels — presumably per side; TODO confirm
    pub max_image_size: u32,
    /// File formats accepted/produced (e.g. `png`, `jpeg`, `webp`)
    pub supported_formats: Vec<String>,
    /// Whether image analysis features are enabled
    pub enable_analysis: bool,
}

/// API provider configuration: per-provider credentials plus shared
/// networking policy (timeouts, retries, rate limiting).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderConfig {
    /// OpenAI credentials and endpoint
    pub openai: OpenAIConfig,
    /// Anthropic credentials and endpoint
    pub anthropic: AnthropicConfig,
    /// Request timeout in seconds (must be > 0; enforced by `AppConfig::validate`)
    pub timeout_seconds: u64,
    /// Maximum number of retries for failed requests
    pub max_retries: u32,
    /// Outbound rate limit in requests per minute (must be > 0)
    pub rate_limit_rpm: u32,
}

/// OpenAI specific configuration.
///
/// The API key is sourced from the `OPENAI_API_KEY` environment variable by
/// `Default` and is never written out when serializing (`skip_serializing`),
/// so saved config files do not leak the secret. When deserializing, the
/// missing field becomes `None`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIConfig {
    /// API key (from `OPENAI_API_KEY`; never serialized)
    #[serde(skip_serializing)]
    pub api_key: Option<String>,
    /// Base URL for API requests (default: `https://api.openai.com/v1`)
    pub base_url: String,
    /// Optional organization ID (from `OPENAI_ORGANIZATION_ID`)
    pub organization_id: Option<String>,
}

/// Anthropic specific configuration.
///
/// The API key is sourced from the `ANTHROPIC_API_KEY` environment variable
/// by `Default` and is never serialized (`skip_serializing`), keeping the
/// secret out of saved config files.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// API key (from `ANTHROPIC_API_KEY`; never serialized)
    #[serde(skip_serializing)]
    pub api_key: Option<String>,
    /// Base URL for API requests (default: `https://api.anthropic.com`)
    pub base_url: String,
}

/// Performance configuration: connection pooling and response caching knobs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceConfig {
    /// Whether connection pooling is enabled
    pub enable_connection_pool: bool,
    /// Maximum number of pooled connections (must be > 0; validated)
    pub max_pool_size: usize,
    /// Connection timeout in seconds (must be > 0; validated)
    pub connection_timeout_seconds: u64,
    /// Whether response caching is enabled (overridable via `RIG_ENABLE_CACHE`)
    pub enable_response_cache: bool,
    /// Time-to-live for cached responses, in seconds
    pub cache_ttl_seconds: u64,
}

/// Security configuration: request validation, rate limiting, and upload
/// restrictions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecurityConfig {
    /// Whether API keys are validated before use
    pub validate_api_keys: bool,
    /// Whether per-client request rate limiting is enabled
    pub enable_rate_limiting: bool,
    /// Maximum requests per minute per client (must be > 0; validated)
    pub max_requests_per_minute: u32,
    /// Whether content filtering is enabled
    pub enable_content_filtering: bool,
    /// File extensions permitted for uploads (e.g. `txt`, `json`, `md`)
    pub allowed_file_extensions: Vec<String>,
}

/// Logging configuration: verbosity, destinations, and metrics collection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoggingConfig {
    /// Log level string (e.g. `"info"`; overridable via `RIG_LOG_LEVEL`)
    pub level: String,
    /// Whether structured (machine-parseable) logging is used
    pub structured: bool,
    /// Whether logs are also written to a file
    pub log_to_file: bool,
    /// Target file for log output; only meaningful when `log_to_file` is true
    pub log_file_path: Option<PathBuf>,
    /// Whether performance metrics collection is enabled
    pub enable_metrics: bool,
    /// Interval between metrics collections, in seconds
    pub metrics_interval_seconds: u64,
}

impl Default for AppConfig {
    fn default() -> Self {
        Self {
            database: DatabaseConfig::default(),
            embeddings: EmbeddingConfig::default(),
            images: ImageConfig::default(),
            providers: ProviderConfig::default(),
            performance: PerformanceConfig::default(),
            security: SecurityConfig::default(),
            logging: LoggingConfig::default(),
        }
    }
}

impl Default for DatabaseConfig {
    fn default() -> Self {
        Self {
            agents_file: PathBuf::from("agents.json"),
            tools_file: PathBuf::from("tools.json"),
            auto_backup: true,
            backup_interval_seconds: 3600,
        }
    }
}

impl Default for EmbeddingConfig {
    fn default() -> Self {
        Self {
            default_provider: "openai".to_string(),
            default_model: "text-embedding-ada-002".to_string(),
            vector_dimension: 1536,
            cache_embeddings: true,
            max_cache_size: 1000,
        }
    }
}

impl Default for ImageConfig {
    fn default() -> Self {
        Self {
            default_provider: "openai".to_string(),
            default_model: "dall-e-3".to_string(),
            max_image_size: 4096,
            supported_formats: vec!["png".to_string(), "jpeg".to_string(), "webp".to_string()],
            enable_analysis: true,
        }
    }
}

impl Default for ProviderConfig {
    fn default() -> Self {
        Self {
            openai: OpenAIConfig::default(),
            anthropic: AnthropicConfig::default(),
            timeout_seconds: 30,
            max_retries: 3,
            rate_limit_rpm: 60,
        }
    }
}

impl Default for OpenAIConfig {
    fn default() -> Self {
        Self {
            api_key: env::var("OPENAI_API_KEY").ok(),
            base_url: "https://api.openai.com/v1".to_string(),
            organization_id: env::var("OPENAI_ORGANIZATION_ID").ok(),
        }
    }
}

impl Default for AnthropicConfig {
    fn default() -> Self {
        Self {
            api_key: env::var("ANTHROPIC_API_KEY").ok(),
            base_url: "https://api.anthropic.com".to_string(),
        }
    }
}

impl Default for PerformanceConfig {
    fn default() -> Self {
        Self {
            enable_connection_pool: true,
            max_pool_size: 10,
            connection_timeout_seconds: 10,
            enable_response_cache: true,
            cache_ttl_seconds: 300,
        }
    }
}

impl Default for SecurityConfig {
    fn default() -> Self {
        Self {
            validate_api_keys: true,
            enable_rate_limiting: true,
            max_requests_per_minute: 100,
            enable_content_filtering: false,
            allowed_file_extensions: vec!["txt".to_string(), "json".to_string(), "md".to_string()],
        }
    }
}

impl Default for LoggingConfig {
    fn default() -> Self {
        Self {
            level: "info".to_string(),
            structured: true,
            log_to_file: false,
            log_file_path: Some(PathBuf::from("rig-app.log")),
            enable_metrics: true,
            metrics_interval_seconds: 60,
        }
    }
}

impl AppConfig {
    /// Load configuration from environment variables and defaults
    pub fn load() -> Result<Self> {
        let mut config = AppConfig::default();

        // Override with environment variables if present
        if let Ok(log_level) = env::var("RIG_LOG_LEVEL") {
            config.logging.level = log_level;
        }

        if let Ok(agents_file) = env::var("RIG_AGENTS_FILE") {
            config.database.agents_file = PathBuf::from(agents_file);
        }

        if let Ok(tools_file) = env::var("RIG_TOOLS_FILE") {
            config.database.tools_file = PathBuf::from(tools_file);
        }

        if let Ok(enable_cache) = env::var("RIG_ENABLE_CACHE") {
            config.performance.enable_response_cache = enable_cache.parse().unwrap_or(true);
        }

        if let Ok(rate_limit) = env::var("RIG_RATE_LIMIT_RPM") {
            config.providers.rate_limit_rpm = rate_limit.parse().unwrap_or(60);
        }

        // Validate configuration
        config.validate()?;

        Ok(config)
    }

    /// Validate configuration values
    pub fn validate(&self) -> Result<()> {
        // Validate file paths
        if self.database.agents_file.parent().is_none() {
            return Err(anyhow::anyhow!("Invalid agents file path"));
        }

        if self.database.tools_file.parent().is_none() {
            return Err(anyhow::anyhow!("Invalid tools file path"));
        }

        // Validate timeouts
        if self.providers.timeout_seconds == 0 {
            return Err(anyhow::anyhow!("Timeout must be greater than 0"));
        }

        if self.performance.connection_timeout_seconds == 0 {
            return Err(anyhow::anyhow!("Connection timeout must be greater than 0"));
        }

        // Validate rate limits
        if self.providers.rate_limit_rpm == 0 {
            return Err(anyhow::anyhow!("Rate limit must be greater than 0"));
        }

        if self.security.max_requests_per_minute == 0 {
            return Err(anyhow::anyhow!(
                "Max requests per minute must be greater than 0"
            ));
        }

        // Validate pool sizes
        if self.performance.max_pool_size == 0 {
            return Err(anyhow::anyhow!("Max pool size must be greater than 0"));
        }

        // Validate cache settings
        if self.embeddings.max_cache_size == 0 {
            return Err(anyhow::anyhow!("Max cache size must be greater than 0"));
        }

        Ok(())
    }

    /// Get OpenAI API key with validation
    pub fn get_openai_api_key(&self) -> Result<&str> {
        self.providers
            .openai
            .api_key
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("OpenAI API key not configured"))
            .map(|s| s.as_str())
    }

    /// Get Anthropic API key with validation
    pub fn get_anthropic_api_key(&self) -> Result<&str> {
        self.providers
            .anthropic
            .api_key
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("Anthropic API key not configured"))
            .map(|s| s.as_str())
    }

    /// Check if a provider is configured
    pub fn is_provider_configured(&self, provider: &str) -> bool {
        match provider {
            "openai" => self.providers.openai.api_key.is_some(),
            "anthropic" => self.providers.anthropic.api_key.is_some(),
            _ => false,
        }
    }

    /// Get data directory
    pub fn get_data_dir(&self) -> PathBuf {
        self.database
            .agents_file
            .parent()
            .unwrap_or(&PathBuf::from("."))
            .to_path_buf()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// The default configuration must satisfy its own invariants.
    #[test]
    fn test_default_config() {
        let config = AppConfig::default();
        assert!(config.validate().is_ok());
    }

    /// A zero timeout must be rejected by validation.
    #[test]
    fn test_config_validation() {
        let mut config = AppConfig::default();
        config.providers.timeout_seconds = 0;
        assert!(config.validate().is_err());
    }

    /// Provider lookup reflects the presence of API keys; unknown names are
    /// never configured.
    #[test]
    fn test_provider_configuration() {
        // Fix: the defaults read OPENAI_API_KEY / ANTHROPIC_API_KEY from the
        // environment, so asserting "not configured" on a raw default config
        // is flaky on machines/CI where those variables are set. Clear the
        // keys explicitly to make the test deterministic.
        let mut config = AppConfig::default();
        config.providers.openai.api_key = None;
        config.providers.anthropic.api_key = None;
        assert!(!config.is_provider_configured("openai"));
        assert!(!config.is_provider_configured("anthropic"));
        assert!(!config.is_provider_configured("unknown"));
    }
}
