use rig::client::{CompletionClient, ImageGenerationClient, ProviderClient};
use rig::completion::Chat;
use rig::image_generation::ImageGenerationModel;

use base64::{prelude::BASE64_STANDARD, Engine};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::Path;

use crate::agent_types::AgentCategory;
use crate::agents::AgentManager;

/// Image generation request parameters
/// Image generation request parameters.
///
/// NOTE(review): `ImageManager::generate_images` currently forwards only
/// `prompt`, `width`, and `height` to the provider; `num_images`,
/// `quality`, `style`, and `additional_params` are accepted but ignored.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageGenerationRequest {
    /// Text prompt for image generation
    pub prompt: String,
    /// Number of images to generate (default: 1; currently not forwarded to the provider)
    pub num_images: Option<u32>,
    /// Image width in pixels (default: 1024 when unset)
    pub width: Option<u32>,
    /// Image height in pixels (default: 1024 when unset)
    pub height: Option<u32>,
    /// Quality setting (default: standard; currently not forwarded to the provider)
    pub quality: Option<String>,
    /// Style setting (default: vivid; currently not forwarded to the provider)
    pub style: Option<String>,
    /// Additional free-form parameters (currently not forwarded to the provider)
    pub additional_params: Option<HashMap<String, serde_json::Value>>,
}

/// Generated image response
/// Generated image response.
///
/// NOTE(review): as produced by `ImageManager::generate_images`, `format` is
/// always "png" and `width`/`height` echo the requested dimensions (or the
/// 1024 defaults) — they are not read back from the provider response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GeneratedImage {
    /// Base64 encoded image data (standard alphabet, with padding)
    pub data: String,
    /// Image format (e.g., "png", "jpeg")
    pub format: String,
    /// Image width in pixels
    pub width: u32,
    /// Image height in pixels
    pub height: u32,
    /// Generation metadata (currently always empty)
    pub metadata: HashMap<String, serde_json::Value>,
}

/// Image analysis request parameters
/// Image analysis request parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageAnalysisRequest {
    /// Base64 encoded image data
    pub image_data: String,
    /// Image format (e.g., "png", "jpeg"); unknown values are treated as "png"
    pub image_format: String,
    /// Analysis prompt/question sent alongside the image
    pub prompt: String,
    /// Additional parameters (currently not forwarded to the provider)
    pub additional_params: Option<HashMap<String, serde_json::Value>>,
}

/// Image analysis response
/// Image analysis response.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageAnalysisResponse {
    /// Analysis result text from the model
    pub result: String,
    /// Confidence score (currently always `None`; the provider response does not expose one)
    pub confidence: Option<f32>,
    /// Additional metadata (currently always empty)
    pub metadata: HashMap<String, serde_json::Value>,
}

/// Image manager for handling image generation and analysis
/// Image manager for handling image generation and analysis.
///
/// Model clients are built on demand per request rather than cached here:
/// the rig model traits require `Sized`, so they cannot be stored as trait
/// objects. The unit fields below are placeholders for a future cache.
#[derive(Debug, Default)]
pub struct ImageManager {
    // Placeholders kept so `ImageManager::new()` and construction sites
    // remain stable while the caching strategy is unresolved.
    _generation_models: (),
    _analysis_models: (),
}

impl ImageManager {
    /// Create a new image manager
    pub fn new() -> Self {
        Self {
            _generation_models: (),
            _analysis_models: (),
        }
    }

    /// Create an image generation model for a provider (simplified approach)
    fn create_generation_model(
        &self,
        provider: &str,
        model: &str,
    ) -> anyhow::Result<rig::providers::openai::ImageGenerationModel> {
        match provider {
            "openai" => {
                let client = rig::providers::openai::Client::from_env();
                Ok(client.image_generation_model(model))
            }
            _ => Err(anyhow::anyhow!(
                "Unsupported provider for image generation: {}",
                provider
            )),
        }
    }

    /// Create an image analysis model for a provider (simplified approach)
    fn create_analysis_model(
        &self,
        provider: &str,
        model: &str,
    ) -> anyhow::Result<rig::providers::openai::responses_api::ResponsesCompletionModel> {
        match provider {
            "openai" => {
                let client = rig::providers::openai::Client::from_env();
                Ok(client.completion_model(model))
            }
            _ => Err(anyhow::anyhow!(
                "Unsupported provider for image analysis: {}",
                provider
            )),
        }
    }

    /// Generate images based on text prompt
    pub async fn generate_images(
        &self,
        provider: &str,
        model: &str,
        request: ImageGenerationRequest,
    ) -> anyhow::Result<Vec<GeneratedImage>> {
        let generation_model = self.create_generation_model(provider, model)?;

        let mut image_request = generation_model
            .image_generation_request()
            .prompt(&request.prompt);

        // Apply optional parameters
        if let Some(width) = request.width {
            image_request = image_request.width(width);
        }

        if let Some(height) = request.height {
            image_request = image_request.height(height);
        }

        // Send the request
        let response = image_request.send().await?;

        // Convert response to our format
        let generated_images = vec![GeneratedImage {
            data: BASE64_STANDARD.encode(&response.image),
            format: "png".to_string(), // Default format
            width: request.width.unwrap_or(1024),
            height: request.height.unwrap_or(1024),
            metadata: HashMap::new(),
        }];

        Ok(generated_images)
    }

    /// Analyze an image with AI
    pub async fn analyze_image(
        &self,
        provider: &str,
        model: &str,
        request: ImageAnalysisRequest,
    ) -> anyhow::Result<ImageAnalysisResponse> {
        let analysis_model = self.create_analysis_model(provider, model)?;

        // Create agent for image analysis
        let agent = rig::agent::AgentBuilder::new(analysis_model)
            .preamble("You are an image analysis expert. Analyze the provided image and answer the user's question accurately and concisely.")
            .build();

        // Create image message
        let image_message = rig::completion::Message::User {
            content: rig::one_or_many::OneOrMany::one(rig::message::UserContent::Image(
                rig::message::Image {
                    data: request.image_data.clone(),
                    media_type: Some(match request.image_format.as_str() {
                        "png" => rig::message::ImageMediaType::PNG,
                        "jpeg" | "jpg" => rig::message::ImageMediaType::JPEG,
                        "gif" => rig::message::ImageMediaType::GIF,
                        "webp" => rig::message::ImageMediaType::WEBP,
                        _ => rig::message::ImageMediaType::PNG,
                    }),
                    format: Some(rig::message::ContentFormat::Base64),
                    ..Default::default()
                },
            )),
        };

        // Add text prompt as a separate message
        let text_message = rig::completion::Message::User {
            content: rig::one_or_many::OneOrMany::one(rig::message::UserContent::Text(
                rig::message::Text {
                    text: request.prompt.clone(),
                },
            )),
        };

        // Send both messages - we need to use the correct API
        // For now, let's just send the text message with image included
        let combined_message = rig::completion::Message::User {
            content: rig::one_or_many::OneOrMany::many(vec![
                rig::message::UserContent::Image(rig::message::Image {
                    data: request.image_data.clone(),
                    media_type: Some(match request.image_format.as_str() {
                        "png" => rig::message::ImageMediaType::PNG,
                        "jpeg" | "jpg" => rig::message::ImageMediaType::JPEG,
                        "gif" => rig::message::ImageMediaType::GIF,
                        "webp" => rig::message::ImageMediaType::WEBP,
                        _ => rig::message::ImageMediaType::PNG,
                    }),
                    format: Some(rig::message::ContentFormat::Base64),
                    ..Default::default()
                }),
                rig::message::UserContent::Text(rig::message::Text {
                    text: request.prompt.clone(),
                }),
            ])?,
        };

        let response = agent.chat(combined_message, vec![]).await?;

        Ok(ImageAnalysisResponse {
            result: response,
            confidence: None, // You might extract this from the model response if available
            metadata: HashMap::new(),
        })
    }

    /// Save generated image to file
    pub async fn save_image_to_file(
        &self,
        image: &GeneratedImage,
        file_path: &Path,
    ) -> anyhow::Result<()> {
        let image_data = BASE64_STANDARD.decode(&image.data)?;
        tokio::fs::write(file_path, image_data).await?;
        Ok(())
    }

    /// Load image from file and convert to base64
    pub async fn load_image_from_file(&self, file_path: &Path) -> anyhow::Result<(String, String)> {
        let image_data = tokio::fs::read(file_path).await?;
        let base64_data = BASE64_STANDARD.encode(&image_data);

        // Determine format from file extension
        let format = file_path
            .extension()
            .and_then(|ext| ext.to_str())
            .unwrap_or("png");

        Ok((base64_data, format.to_string()))
    }
}

/// Create an image generation agent
/// Create a new agent in the `Image` category and return its id.
///
/// Delegates to `AgentManager::create_agent` with no tool/config overrides.
pub async fn create_image_agent(
    agent_manager: &AgentManager,
    name: &str,
    provider: &str,
    model: &str,
    preamble: &str,
) -> anyhow::Result<i64> {
    let agent_id = agent_manager
        .create_agent(
            name,
            provider,
            model,
            preamble,
            None,
            None,
            AgentCategory::Image,
            None,
        )
        .await?;
    Ok(agent_id)
}

/// Generate images using an image agent
/// Generate images using the configuration of a stored image agent.
///
/// Looks up the agent by id, then forwards its provider/model settings
/// together with the request to the image manager.
pub async fn generate_with_agent(
    agent_manager: &AgentManager,
    image_manager: &ImageManager,
    agent_id: i64,
    request: ImageGenerationRequest,
) -> anyhow::Result<Vec<GeneratedImage>> {
    // A missing agent id is an error, not an empty result.
    let agent = match agent_manager.get_agent(agent_id).await? {
        Some(agent) => agent,
        None => return Err(anyhow::anyhow!("Agent not found")),
    };

    let config = &agent.base_config;
    image_manager
        .generate_images(&config.provider, &config.model, request)
        .await
}

/// Analyze image using an image agent
/// Analyze an image using the configuration of a stored image agent.
///
/// Looks up the agent by id, then forwards its provider/model settings
/// together with the request to the image manager.
pub async fn analyze_with_agent(
    agent_manager: &AgentManager,
    image_manager: &ImageManager,
    agent_id: i64,
    request: ImageAnalysisRequest,
) -> anyhow::Result<ImageAnalysisResponse> {
    // A missing agent id is an error, not an empty result.
    let agent = match agent_manager.get_agent(agent_id).await? {
        Some(agent) => agent,
        None => return Err(anyhow::anyhow!("Agent not found")),
    };

    let config = &agent.base_config;
    image_manager
        .analyze_image(&config.provider, &config.model, request)
        .await
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[tokio::test]
    async fn test_image_manager_creation() {
        // ImageManager holds no observable state, so panic-free
        // construction is the entire test. (The previous `assert!(true)`
        // was a no-op and left `manager` unused.)
        let _manager = ImageManager::new();
    }

    #[tokio::test]
    async fn test_create_image_agent() {
        let temp_dir = TempDir::new().unwrap();
        let agents_file = temp_dir.path().join("agents.json");
        let agent_manager = AgentManager::new(&agents_file).unwrap();

        let result = create_image_agent(
            &agent_manager,
            "Test Image Agent",
            "openai",
            "dall-e-3",
            "You are an image generation assistant.",
        )
        .await;

        assert!(result.is_ok());
    }
}
