//! AI service models
//! 
//! This module defines AI-related data structures for Lumos.ai integration.

use crate::models::{Value, ValueMap, Timestamp, now};
use serde::{Deserialize, Serialize};
use uuid::Uuid;

/// Connection and request options for an external AI service.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIServiceOptions {
    /// Base URL of the AI service endpoint
    pub endpoint: String,
    
    /// API key used to authenticate requests (sensitive — avoid logging/printing)
    pub api_key: String,
    
    /// Model configuration (model name plus generation parameters)
    pub model_config: AIModelConfig,
    
    /// Request timeout; unit is not specified here — presumably seconds,
    /// TODO confirm against the client that consumes these options
    pub timeout: Option<f64>,
    
    /// Maximum retry attempts for failed requests; semantics of `None`
    /// (no retries vs. client default) are decided by the consumer
    pub max_retries: Option<u32>,
}

/// Generation parameters for an AI model (see `Default` impl below for
/// the defaults used when no explicit configuration is supplied).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIModelConfig {
    /// Model identifier, e.g. "gpt-3.5-turbo"
    pub model: String,
    
    /// Sampling temperature; higher values yield more random output.
    /// NOTE(review): accepted range depends on the backend — TODO confirm
    pub temperature: Option<f32>,
    
    /// Maximum number of tokens to generate
    pub max_tokens: Option<u32>,
    
    /// Nucleus (top-p) sampling cutoff; defaulted to 1.0 below
    pub top_p: Option<f32>,
    
    /// Additional free-form, backend-specific parameters
    pub parameters: ValueMap,
}

/// Descriptive information about an available AI model.
///
/// All fields are plain std types, so equality and hashing are derived —
/// this lets callers deduplicate model lists or key maps by model.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct AIModel {
    /// Stable model identifier (as reported by the provider)
    pub id: String,
    
    /// Human-readable model name
    pub name: String,
    
    /// Optional free-text description of the model
    pub description: Option<String>,
    
    /// Capability tags advertised by the model (free-form strings)
    pub capabilities: Vec<String>,
    
    /// Model version string
    pub version: String,
}

/// A prompt instance: a template plus the variables to interpolate into it.
///
/// Construct via [`AIPrompt::new`] and populate variables with the
/// builder methods; `render` produces the final prompt text.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIPrompt {
    /// Unique prompt ID (random v4 UUID, assigned at construction)
    pub id: Uuid,
    
    /// Prompt template text (interpolation not yet implemented — see `render`)
    pub template: String,
    
    /// Variables to substitute into the template
    pub variables: ValueMap,
    
    /// Category of prompt (drives which workflow consumes it)
    pub prompt_type: AIPromptType,
    
    /// Creation timestamp (set to `now()` at construction)
    pub created_at: Timestamp,
}

/// Categories of AI prompt supported by the service.
///
/// Derives `PartialEq`/`Eq`/`Hash` (all variants carry only `String`
/// payloads) so prompt types can be compared directly and used as map
/// keys — e.g. for routing prompts to handlers.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum AIPromptType {
    /// Test case generation
    TestGeneration,
    
    /// Assertion generation
    AssertionGeneration,
    
    /// Failure analysis
    FailureAnalysis,
    
    /// Performance analysis
    PerformanceAnalysis,
    
    /// Test optimization
    TestOptimization,
    
    /// Custom prompt identified by a caller-supplied name
    Custom(String),
}

/// A response returned by the AI service, with provenance metadata.
///
/// Construct via [`AIResponse::new`] and enrich with the `with_*` builders.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIResponse {
    /// Unique response ID (random v4 UUID, assigned at construction)
    pub id: Uuid,
    
    /// Raw response content from the model
    pub content: String,
    
    /// Provenance and accounting metadata (model, tokens, timing, confidence)
    pub metadata: AIResponseMetadata,
    
    /// Timestamp when this response object was created (set to `now()`)
    pub timestamp: Timestamp,
}

/// Provenance and accounting metadata attached to an [`AIResponse`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIResponseMetadata {
    /// Name of the model that produced the response
    pub model: String,
    
    /// Token usage accounting, if reported by the service
    pub token_usage: Option<TokenUsage>,
    
    /// Response time in milliseconds (0 until set via `with_response_time`)
    pub response_time: u64,
    
    /// Confidence score — presumably in [0.0, 1.0]; TODO confirm the
    /// producing service's convention
    pub confidence: Option<f32>,
    
    /// Additional free-form metadata
    pub extra: ValueMap,
}

/// Token usage accounting for a single AI request.
///
/// Plain-old-data (three `u32`s), so `Copy` and the full equality/hash
/// family are derived; `Default` yields an all-zero usage record.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, Serialize, Deserialize)]
pub struct TokenUsage {
    /// Tokens consumed by the prompt
    pub prompt_tokens: u32,
    
    /// Tokens produced in the completion
    pub completion_tokens: u32,
    
    /// Total tokens (prompt + completion)
    pub total_tokens: u32,
}

impl TokenUsage {
    /// Build a usage record from prompt and completion counts, computing
    /// the total with saturating addition so malformed inputs cannot panic
    /// in debug builds.
    pub fn new(prompt_tokens: u32, completion_tokens: u32) -> Self {
        Self {
            prompt_tokens,
            completion_tokens,
            total_tokens: prompt_tokens.saturating_add(completion_tokens),
        }
    }
}

impl AIPrompt {
    /// Create a new prompt with a fresh random ID, an empty variable map,
    /// and the current timestamp.
    pub fn new(template: String, prompt_type: AIPromptType) -> Self {
        let id = Uuid::new_v4();
        let created_at = now();
        Self {
            id,
            template,
            variables: ValueMap::new(),
            prompt_type,
            created_at,
        }
    }
    
    /// Replace the entire variable map (builder style).
    pub fn with_variables(self, variables: ValueMap) -> Self {
        Self { variables, ..self }
    }
    
    /// Insert one variable into the map (builder style).
    pub fn add_variable(mut self, key: String, value: Value) -> Self {
        self.variables.insert(key, value);
        self
    }
    
    /// Render the prompt template.
    ///
    /// Variable interpolation is not implemented yet (a template engine
    /// such as Handlebars or Tera is planned); for now this returns the
    /// raw template text unchanged.
    pub fn render(&self) -> crate::Result<String> {
        Ok(self.template.to_owned())
    }
}

impl AIResponse {
    /// Wrap `content` produced by `model` in a response with a fresh random
    /// ID and the current timestamp. Metadata starts out minimal (no token
    /// usage, zero response time, no confidence); fill it in with the
    /// `with_*` builders.
    pub fn new(content: String, model: String) -> Self {
        let metadata = AIResponseMetadata {
            model,
            token_usage: None,
            response_time: 0,
            confidence: None,
            extra: ValueMap::new(),
        };
        Self {
            id: Uuid::new_v4(),
            content,
            metadata,
            timestamp: now(),
        }
    }
    
    /// Attach token-usage accounting (builder style).
    pub fn with_token_usage(self, usage: TokenUsage) -> Self {
        Self {
            metadata: AIResponseMetadata {
                token_usage: Some(usage),
                ..self.metadata
            },
            ..self
        }
    }
    
    /// Record how long the request took, in milliseconds (builder style).
    pub fn with_response_time(self, response_time: u64) -> Self {
        Self {
            metadata: AIResponseMetadata {
                response_time,
                ..self.metadata
            },
            ..self
        }
    }
    
    /// Attach a confidence score (builder style).
    pub fn with_confidence(self, confidence: f32) -> Self {
        Self {
            metadata: AIResponseMetadata {
                confidence: Some(confidence),
                ..self.metadata
            },
            ..self
        }
    }
}

impl Default for AIModelConfig {
    fn default() -> Self {
        Self {
            model: "gpt-3.5-turbo".to_string(),
            temperature: Some(0.7),
            max_tokens: Some(1000),
            top_p: Some(1.0),
            parameters: ValueMap::new(),
        }
    }
}
