use rig_app::*;
use tempfile::TempDir;
use std::collections::HashMap;

/// End-to-end demonstration of the universal Rig agent stack: chat,
/// embedding-backed RAG, image generation, multi-agent workflows, and a
/// multi-turn conversation with explicit history.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    use std::sync::Arc;

    // Initialize tracing (INFO level, targets suppressed) so the managers'
    // internal log output is visible while the example runs.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .with_target(false)
        .init();

    // Agent definitions are persisted to a throwaway JSON file. The TempDir
    // guard must stay bound for the whole run so the file is not deleted.
    let temp_dir = TempDir::new()?;
    let agents_file = temp_dir.path().join("agents.json");

    // Initialize the individual managers, then hand shared clones to the
    // advanced manager, which coordinates them for workflow execution.
    let agent_manager = AgentManager::new(&agents_file)?;
    let tool_manager = ToolManager::new();
    let embedding_manager = EmbeddingManager::new();
    let image_manager = ImageManager::new();
    let advanced_manager = AdvancedAgentManager::new(
        Arc::new(agent_manager.clone()),
        Arc::new(tool_manager.clone()),
        Arc::new(embedding_manager.clone()),
        Arc::new(image_manager.clone()),
    );

    // Banner/footer separator, built once and reused.
    let separator = "=".repeat(50);

    println!("🚀 Universal Rig Agent Example");
    // BUG FIX: `println!` requires a literal format string as its first
    // argument; `println!("=".repeat(50))` does not compile.
    println!("{}", separator);

    // 1. Create a Chat Agent
    println!("\n1. Creating Chat Agent...");
    let chat_agent_id = agent_manager
        .create_agent_sync(
            "Universal Chat Assistant",
            "openai",
            "gpt-4",
            "You are a helpful AI assistant capable of answering questions, providing information, and engaging in meaningful conversations. Be friendly, informative, and concise.",
            None,
            None,
            AgentCategory::Chat,
            None,
        )?;
    println!("✅ Chat Agent created with ID: {}", chat_agent_id);

    // 2. Create an Embed Agent backed by a small in-memory document set.
    println!("\n2. Creating Embed Agent...");
    let embed_documents = vec![
        EmbeddingDocument {
            id: "doc1".to_string(),
            content: "Rust is a systems programming language that runs blazingly fast, prevents segfaults, and guarantees thread safety.".to_string(),
            metadata: {
                let mut meta = HashMap::new();
                meta.insert("topic".to_string(), "programming".to_string());
                meta.insert("language".to_string(), "rust".to_string());
                meta
            },
            // Vectors are left empty; the embedding provider fills them in.
            vector: None,
        },
        EmbeddingDocument {
            id: "doc2".to_string(),
            content: "Machine learning is a subset of artificial intelligence that enables systems to learn and improve from experience without being explicitly programmed.".to_string(),
            metadata: {
                let mut meta = HashMap::new();
                meta.insert("topic".to_string(), "ai".to_string());
                meta.insert("field".to_string(), "ml".to_string());
                meta
            },
            vector: None,
        },
    ];

    let embed_agent_id = create_embedding_agent(
        &agent_manager,
        &embedding_manager,
        "Knowledge Base Assistant",
        "openai",
        "text-embedding-ada-002",
        "You are a knowledge base assistant. Use the provided context to answer questions accurately and concisely.",
        embed_documents,
    ).await?;
    println!("✅ Embed Agent created with ID: {}", embed_agent_id);

    // 3. Create an Image Agent
    println!("\n3. Creating Image Agent...");
    let image_agent_id = create_image_agent(
        &agent_manager,
        "Creative Image Generator",
        "openai",
        "dall-e-3",
        "You are a creative image generation assistant. Create vivid, detailed, and artistic images based on user descriptions.",
    ).await?;
    println!("✅ Image Agent created with ID: {}", image_agent_id);

    // 4. Test the chat agent with a single one-shot prompt.
    println!("\n4. Testing Chat Agent...");
    let chat_response = prompt(
        &agent_manager,
        &tool_manager,
        chat_agent_id,
        "What are the key features of Rust programming language?",
    ).await?;
    println!("💬 Chat Response: {}", chat_response);

    // 5. Test the embed agent with a RAG query (top-2 documents as context).
    println!("\n5. Testing Embed Agent with RAG...");
    let rag_response = rag_query(
        &agent_manager,
        &embedding_manager,
        embed_agent_id,
        "What is machine learning?",
        2,
    ).await?;
    println!("📚 RAG Response: {}", rag_response);

    // 6. Test the image agent. Image generation can fail (quota, network),
    // so this step reports the error instead of aborting the example.
    println!("\n6. Testing Image Agent...");
    let image_request = ImageGenerationRequest {
        prompt: "A futuristic robot reading a book in a cozy library, digital art style".to_string(),
        num_images: Some(1),
        width: Some(1024),
        height: Some(1024),
        quality: None,
        style: None,
        additional_params: None,
    };

    match generate_with_agent(
        &agent_manager,
        &image_manager,
        image_agent_id,
        image_request,
    ).await {
        Ok(images) => {
            println!("🎨 Generated {} image(s)", images.len());
            for (i, image) in images.iter().enumerate() {
                println!("   Image {}: {}x{} {}", i + 1, image.width, image.height, image.format);
            }
        }
        Err(e) => println!("❌ Image generation failed: {}", e),
    }

    // 7. Register a two-step sequential workflow. The second step receives
    // the first step's output because `pass_output` is true.
    println!("\n7. Creating Advanced Workflow...");
    let workflow_config = WorkflowConfig {
        name: "content_creation_workflow".to_string(),
        description: "A workflow for creating content with multiple agents".to_string(),
        mode: OrchestrationMode::Sequential,
        agent_ids: vec![chat_agent_id],
        steps: Some(vec![
            WorkflowStep {
                name: "brainstorm".to_string(),
                agent_id: chat_agent_id,
                prompt_template: "Brainstorm ideas about: {input}".to_string(),
                pass_output: false,
                max_retries: 2,
            },
            WorkflowStep {
                name: "refine".to_string(),
                agent_id: chat_agent_id,
                prompt_template: "Refine and improve the following ideas: {input}".to_string(),
                pass_output: true,
                max_retries: 2,
            },
        ]),
        routing_rules: None,
    };

    advanced_manager.register_workflow(workflow_config).await?;
    println!("✅ Workflow registered successfully");

    // 8. Execute the workflow and report per-step success and timing.
    println!("\n8. Executing Workflow...");
    let workflow_result = advanced_manager
        .execute_workflow("content_creation_workflow", "sustainable technology")
        .await?;

    println!("🔄 Workflow Results:");
    println!("   Workflow: {}", workflow_result.workflow_name);
    println!("   Final Output: {}", workflow_result.final_output);
    println!("   Steps Executed: {}", workflow_result.step_results.len());

    for (i, step) in workflow_result.step_results.iter().enumerate() {
        println!("   Step {}: {} - {} ({})",
            i + 1,
            step.step_name,
            if step.success { "✅" } else { "❌" },
            step.execution_time_ms
        );
    }

    // 9. List every agent created so far from the persistent store.
    println!("\n9. Listing All Agents...");
    let all_agents = agent_manager.list_agents_sync()?;
    println!("📋 Total Agents: {}", all_agents.len());

    for agent in all_agents {
        println!("   - {} (ID: {}, Category: {:?}, Provider: {}, Model: {})",
            agent.base_config.name,
            agent.base_config.id,
            agent.category,
            agent.base_config.provider,
            agent.base_config.model
        );
    }

    // 10. Multi-turn chat: the caller maintains history explicitly, pushing
    // both the user turn and the assistant reply before the next call.
    println!("\n10. Testing Multi-turn Chat...");
    let mut history = vec![];

    let first_response = chat(
        &agent_manager,
        &tool_manager,
        chat_agent_id,
        "Hello! I'm learning about AI agents.",
        history.clone(),
    ).await?;

    history.push(ChatMessage {
        role: "user".to_string(),
        content: "Hello! I'm learning about AI agents.".to_string(),
    });
    history.push(ChatMessage {
        role: "assistant".to_string(),
        content: first_response.clone(),
    });

    println!("💬 First Response: {}", first_response);

    let second_response = chat(
        &agent_manager,
        &tool_manager,
        chat_agent_id,
        "Can you explain what makes them different from regular programs?",
        history,
    ).await?;

    println!("💬 Second Response: {}", second_response);

    println!("\n🎉 Universal Agent Example Completed Successfully!");
    println!("{}", separator);

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: build the full manager stack, create a chat agent, and
    /// register a minimal sequential workflow referencing it.
    #[tokio::test]
    async fn test_universal_agent_creation() {
        use std::sync::Arc;

        // Back the agent store with a throwaway JSON file.
        let dir = TempDir::new().unwrap();
        let store = dir.path().join("agents.json");

        let agents = AgentManager::new(&store).unwrap();
        let tools = ToolManager::new();
        let embeddings = EmbeddingManager::new();
        let images = ImageManager::new();

        // The orchestrator shares the managers via Arc-wrapped clones.
        let orchestrator = AdvancedAgentManager::new(
            Arc::new(agents.clone()),
            Arc::new(tools.clone()),
            Arc::new(embeddings.clone()),
            Arc::new(images.clone()),
        );

        // Creating a chat agent should yield a positive identifier.
        let agent_id = agents
            .create_agent_sync(
                "Test Chat Agent",
                "openai",
                "gpt-4",
                "Test preamble",
                None,
                None,
                AgentCategory::Chat,
                None,
            )
            .unwrap();
        assert!(agent_id > 0);

        // A step-less sequential workflow over that agent registers cleanly.
        let workflow = WorkflowConfig {
            name: "test_workflow".to_string(),
            description: "Test workflow".to_string(),
            mode: OrchestrationMode::Sequential,
            agent_ids: vec![agent_id],
            steps: None,
            routing_rules: None,
        };
        assert!(orchestrator.register_workflow(workflow).await.is_ok());
    }
}
