use futures::stream::StreamExt;
use rig::{completion::prompt::Prompt, Rig};
use serde::{Deserialize, Serialize};
use tauri::Window;
use uuid::Uuid;

use crate::{AppConfig, ProviderConfig};

/// One chunk of streamed LLM output, tagged with the stream it belongs to.
///
/// Serialized to the frontend as part of a [`FrontendEvent::StreamChunk`]
/// Tauri event, so the UI can route chunks to the correct in-flight request.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct StreamPayload {
    // Stream identifier (UUID v4 string) shared by all chunks of one request.
    id: String,
    // Raw text chunk as received from the LLM stream.
    data: String,
}

/// Events emitted to the frontend over the `"frontend-event"` Tauri channel.
///
/// Together these describe the lifecycle of one streamed LLM response:
/// `StreamStart` → zero or more `StreamChunk`s → `StreamEnd`, with `Error`
/// sent if the stream fails partway through.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum FrontendEvent {
    /// A new stream with the given id has begun.
    StreamStart { id: String },
    /// A chunk of text for an in-flight stream.
    StreamChunk(StreamPayload),
    /// The stream with the given id completed normally.
    StreamEnd { id: String },
    /// The stream failed; `message` is a human-readable description.
    Error { message: String },
}

// Helper function to create a Rig instance from config.
//
// Builds a `Rig` client from the active provider's credentials and model
// name. Currently only the OpenAI variant exists; the match is exhaustive,
// which means adding a new `ProviderConfig` variant will force this function
// to be updated at compile time (intentional).
//
// NOTE(review): the single-arm `match` only compiles if `ProviderConfig`
// (declared in crate root, not visible here) has exactly one variant —
// confirm before refactoring this into a `let` destructure.
//
// Returns `Err(String)` per this module's convention of stringly-typed
// errors for Tauri command boundaries; the builder itself is assumed
// infallible here — TODO confirm `Rig::builder().build()` cannot fail.
fn create_rig_from_config(provider: &ProviderConfig) -> Result<Rig, String> {
    match provider {
        ProviderConfig::OpenAI { api_key, model, .. } => {
            let client = rig::providers::openai::OpenAI::new(api_key);
            let rig = Rig::builder()
                .with_provider(client)
                .with_model(model)
                .build();
            Ok(rig)
        }
    }
}

#[tauri::command]
pub async fn parse_command_stream(
    app_config: AppConfig,
    command: String,
    context_files: Vec<String>,
    window: Window,
) -> Result<(), String> {
    let active_provider_id = app_config.active_provider_id.ok_or("No active provider selected")?;
    let active_provider = app_config
        .providers
        .iter()
        .find(|p| match p {
            ProviderConfig::OpenAI { id, .. } => id == &active_provider_id,
        })
        .ok_or("Active provider not found in configuration")?;

    let rig = create_rig_from_config(active_provider)?;

    let stream_id = Uuid::new_v4().to_string();
    window.emit("frontend-event", FrontendEvent::StreamStart { id: stream_id.clone() }).unwrap();

    let prompt = format!(
        "You are a helpful desktop assistant. Your goal is to help the user manage their file system.\n"
        "Based on the user's command, you must respond with a single, valid JSON object that represents the action to take.\n"
        "The user's current directory contains the following files: {}\n\n"
        "User command: \"{}\"\n\n"
        "JSON Response:",
        context_files.join(", "),
        command
    );

    let mut stream = rig.chat().create_stream(Prompt::from_string(prompt).unwrap()).await.map_err(|e| e.to_string())?;

    while let Some(chunk) = stream.next().await {
        match chunk {
            Ok(data) => {
                window.emit("frontend-event", FrontendEvent::StreamChunk(StreamPayload { id: stream_id.clone(), data })).unwrap();
            }
            Err(e) => {
                let error_message = format!("LLM stream error: {}", e);
                window.emit("frontend-event", FrontendEvent::Error { message: error_message.clone() }).unwrap();
                return Err(error_message);
            }
        }
    }

    window.emit("frontend-event", FrontendEvent::StreamEnd { id: stream_id.clone() }).unwrap();

    // TODO: In the next step, we will parse the full response and execute the action.

    Ok(())
}
