use anyhow::Result;
use rusqlite::params;
use serde::{Deserialize, Serialize};
use tauri::State;
use reqwest;

use super::storage::AppDb;
use super::model::AiModel;
use super::password::MasterPasswordCache;

/// Conversation representation
///
/// Mirrors a row of the `conversations` table; serialized to/from the
/// frontend via serde.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Conversation {
    // None until the row is inserted (id assigned by SQLite).
    pub id: Option<i64>,
    pub title: String,
    // References a row in `ai_models`.
    pub model_id: i64,
    // Populated by the database (CURRENT_TIMESTAMP); None before persistence.
    pub created_at: Option<String>,
    pub updated_at: Option<String>,
}

/// Message representation
///
/// Mirrors a row of the `messages` table.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Message {
    // None until the row is inserted (id assigned by SQLite).
    pub id: Option<i64>,
    pub conversation_id: i64,
    pub role: String, // "user" or "assistant"
    pub content: String,
    // Populated by the database; None before persistence.
    pub created_at: Option<String>,
}

/// OpenAI API request structure
///
/// Body for an OpenAI-compatible chat-completions POST.
#[derive(Debug, Serialize)]
struct OpenAIRequest {
    model: String,
    messages: Vec<OpenAIMessage>,
    // Omitted from the JSON when None so the API applies its own defaults.
    #[serde(skip_serializing_if = "Option::is_none")]
    max_tokens: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    temperature: Option<f64>,
}

/// One chat message in OpenAI wire format. Appears in both requests and
/// responses, hence both Serialize and Deserialize.
#[derive(Debug, Serialize, Deserialize)]
struct OpenAIMessage {
    role: String,
    content: String,
}

/// Minimal model of the chat-completions response — only the fields this
/// module actually reads.
#[derive(Debug, Deserialize)]
struct OpenAIResponse {
    choices: Vec<OpenAIChoice>,
}

/// One completion choice; only the message payload is used.
#[derive(Debug, Deserialize)]
struct OpenAIChoice {
    message: OpenAIMessage,
}

/// Create a new conversation
///
/// Inserts a row into `conversations` and returns the id SQLite assigned
/// to it. `created_at`/`updated_at` are filled in by the database.
#[tauri::command]
pub async fn create_conversation(
    db: State<'_, AppDb>,
    title: String,
    model_id: i64,
) -> Result<i64, String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    let insert_result = conn.execute(
        "INSERT INTO conversations (title, model_id) VALUES (?1, ?2)",
        params![title, model_id],
    );

    match insert_result {
        Ok(_) => Ok(conn.last_insert_rowid()),
        Err(e) => Err(format!("Failed to create conversation: {}", e)),
    }
}

/// Get all conversations
///
/// Returns every conversation, most recently updated first.
#[tauri::command]
pub async fn get_conversations(
    db: State<'_, AppDb>,
) -> Result<Vec<Conversation>, String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    let mut stmt = conn
        .prepare(
            "SELECT id, title, model_id, created_at, updated_at 
             FROM conversations 
             ORDER BY updated_at DESC"
        )
        .map_err(|e| e.to_string())?;

    let rows = stmt
        .query_map([], |row| {
            Ok(Conversation {
                id: row.get(0)?,
                title: row.get(1)?,
                model_id: row.get(2)?,
                created_at: row.get(3)?,
                updated_at: row.get(4)?,
            })
        })
        .map_err(|e| e.to_string())?;

    // Accumulate manually, surfacing the first row-mapping error as a String.
    let mut conversations = Vec::new();
    for row in rows {
        conversations.push(row.map_err(|e| e.to_string())?);
    }

    Ok(conversations)
}

/// Get a specific conversation
///
/// Fetches a single conversation by id; errors if no such row exists.
#[tauri::command]
pub async fn get_conversation(
    db: State<'_, AppDb>,
    id: i64,
) -> Result<Conversation, String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    conn.query_row(
        "SELECT id, title, model_id, created_at, updated_at 
             FROM conversations 
             WHERE id = ?1",
        params![id],
        |row| {
            // Pull each column into a named local before assembling the struct.
            let id: Option<i64> = row.get(0)?;
            let title: String = row.get(1)?;
            let model_id: i64 = row.get(2)?;
            let created_at: Option<String> = row.get(3)?;
            let updated_at: Option<String> = row.get(4)?;
            Ok(Conversation { id, title, model_id, created_at, updated_at })
        },
    )
    .map_err(|e| format!("Conversation not found: {}", e))
}

/// Update conversation title
///
/// Renames a conversation and refreshes its `updated_at` timestamp.
#[tauri::command]
pub async fn update_conversation(
    db: State<'_, AppDb>,
    id: i64,
    title: String,
) -> Result<(), String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    let result = conn.execute(
        "UPDATE conversations SET title = ?1, updated_at = CURRENT_TIMESTAMP WHERE id = ?2",
        params![title, id],
    );

    match result {
        Ok(_) => Ok(()),
        Err(e) => Err(format!("Failed to update conversation: {}", e)),
    }
}

/// Update conversation model
///
/// Points a conversation at a different AI model, after verifying that the
/// target model actually exists in `ai_models`.
#[tauri::command]
pub async fn update_conversation_model(
    db: State<'_, AppDb>,
    id: i64,
    model_id: i64,
) -> Result<(), String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    // Verify the model exists: read the COUNT directly and compare here
    // rather than inside the row-mapping closure.
    let matching_models: i64 = conn
        .query_row(
            "SELECT COUNT(*) FROM ai_models WHERE id = ?1",
            params![model_id],
            |row| row.get(0),
        )
        .map_err(|e| format!("Failed to verify model: {}", e))?;

    if matching_models == 0 {
        return Err("Model not found".to_string());
    }

    conn.execute(
        "UPDATE conversations SET model_id = ?1, updated_at = CURRENT_TIMESTAMP WHERE id = ?2",
        params![model_id, id],
    )
    .map_err(|e| format!("Failed to update conversation model: {}", e))
    .map(|_| ())
}

/// Delete a conversation
///
/// Removes the conversation row by id.
///
/// NOTE(review): only the `conversations` row is deleted here; child rows in
/// `messages` are presumably cleaned up by an ON DELETE CASCADE foreign key —
/// confirm against the schema.
#[tauri::command]
pub async fn delete_conversation(
    db: State<'_, AppDb>,
    id: i64,
) -> Result<(), String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    conn.execute("DELETE FROM conversations WHERE id = ?1", params![id])
        .map_err(|e| format!("Failed to delete conversation: {}", e))
        .map(|_| ())
}

/// Get messages for a conversation
///
/// Returns the conversation's messages in chronological order.
///
/// FIX: `created_at` has second resolution (CURRENT_TIMESTAMP), so the
/// user message and the assistant reply saved back-to-back often share the
/// same timestamp, making `ORDER BY created_at` alone nondeterministic.
/// `id ASC` is added as a tiebreaker to preserve insertion order.
#[tauri::command]
pub async fn get_messages(
    db: State<'_, AppDb>,
    conversation_id: i64,
) -> Result<Vec<Message>, String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    let mut stmt = conn
        .prepare(
            "SELECT id, conversation_id, role, content, created_at 
             FROM messages 
             WHERE conversation_id = ?1 
             ORDER BY created_at ASC, id ASC"
        )
        .map_err(|e| e.to_string())?;

    let messages = stmt
        .query_map(params![conversation_id], |row| {
            Ok(Message {
                id: row.get(0)?,
                conversation_id: row.get(1)?,
                role: row.get(2)?,
                content: row.get(3)?,
                created_at: row.get(4)?,
            })
        })
        .map_err(|e| e.to_string())?
        .collect::<rusqlite::Result<Vec<_>>>()
        .map_err(|e| e.to_string())?;

    Ok(messages)
}

/// Save a message
///
/// Inserts the message and bumps the parent conversation's `updated_at` so
/// it sorts to the top of the conversation list. Returns the new message id.
#[tauri::command]
pub async fn save_message(
    db: State<'_, AppDb>,
    conversation_id: i64,
    role: String,
    content: String,
) -> Result<i64, String> {
    let conn = db.0.lock().map_err(|e| e.to_string())?;

    conn.execute(
        "INSERT INTO messages (conversation_id, role, content) VALUES (?1, ?2, ?3)",
        params![conversation_id, role, content],
    )
    .map_err(|e| format!("Failed to save message: {}", e))?;

    // FIX: capture the rowid immediately after the INSERT. The original read
    // last_insert_rowid() after the UPDATE below — that happens to work
    // (UPDATE does not change it), but silently breaks if another INSERT is
    // ever added in between.
    let message_id = conn.last_insert_rowid();

    // Update conversation updated_at
    conn.execute(
        "UPDATE conversations SET updated_at = CURRENT_TIMESTAMP WHERE id = ?1",
        params![conversation_id],
    )
    .map_err(|e| format!("Failed to update conversation timestamp: {}", e))?;

    Ok(message_id)
}

/// Generate conversation title based on first message
///
/// Asks the configured model for a short title (the prompt requests 10-20
/// characters, in Chinese) and truncates overly long answers. Returns `Err`
/// on any network/parse failure so the caller can keep the default title.
async fn generate_conversation_title(
    model: &AiModel,
    first_user_message: &str,
) -> Result<String, String> {
    let prompt = format!(
        "基于以下用户的第一条消息，生成一个简短的对话标题（10-20个字符）。\
        只返回标题文字，不要引号，不要其他说明。\n\n用户消息：\n{}",
        first_user_message
    );

    let request = OpenAIRequest {
        model: model.model_id.clone(),
        messages: vec![OpenAIMessage {
            role: "user".to_string(),
            content: prompt,
        }],
        max_tokens: Some(50), // keep the generated title short
        temperature: Some(0.7),
    };

    let client = reqwest::Client::new();
    let mut request_builder = client
        .post(&model.api_endpoint)
        .header("Authorization", format!("Bearer {}", model.api_key))
        .header("Content-Type", "application/json");

    // Add OpenRouter specific headers
    if model.model_type == "openrouter" {
        request_builder = request_builder
            .header("HTTP-Referer", "https://github.com/tauri-apps/tauri")
            .header("X-Title", "ssenx_tool_box");
    }

    let response = request_builder
        .json(&request)
        .send()
        .await
        .map_err(|e| format!("Failed to generate title: {}", e))?;

    if !response.status().is_success() {
        log::warn!("Failed to generate conversation title, using default");
        return Err("Failed to generate title".to_string());
    }

    let response_text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read title response: {}", e))?;

    let ai_response: OpenAIResponse = serde_json::from_str(&response_text)
        .map_err(|e| format!("Failed to parse title response: {}", e))?;

    let title = ai_response
        .choices
        .first()
        .ok_or_else(|| "No title generated".to_string())?
        .message
        .content
        // (redundant .clone() removed — trim() borrows, to_string() owns)
        .trim()
        .to_string();

    // Limit title length.
    // BUG FIX: the original sliced by *bytes* (`&title[..27]` with
    // `title.len() > 30`), which panics when index 27 falls inside a
    // multi-byte UTF-8 sequence — near-certain here, since the prompt asks
    // for a Chinese title (3 bytes per char). Count and cut by chars instead.
    let title = if title.chars().count() > 30 {
        let truncated: String = title.chars().take(27).collect();
        format!("{}...", truncated)
    } else {
        title
    };

    Ok(title)
}

/// Send message to AI and get response
///
/// Performs one full chat turn:
///   1. look up the conversation to find its configured model,
///   2. load the model row and decrypt its API key with the cached master
///      password,
///   3. persist the user message, send the whole conversation history to the
///      OpenAI-compatible endpoint, persist the assistant reply,
///   4. on the first exchange, try to auto-generate a conversation title.
///
/// Returns the assistant's reply text; all errors are flattened to `String`
/// for the frontend. Note the std `Mutex` guards below are each confined to
/// their own block/scope so none is held across an `.await`.
#[tauri::command]
pub async fn send_chat_message(
    db: State<'_, AppDb>,
    password_cache: State<'_, MasterPasswordCache>,
    conversation_id: i64,
    message: String,
) -> Result<String, String> {
    // Get conversation to find model_id
    // (block scope: the DB lock is released before any await point)
    let conversation = {
        let conn = db.0.lock().map_err(|e| e.to_string())?;
        conn.query_row(
            "SELECT id, title, model_id, created_at, updated_at FROM conversations WHERE id = ?1",
            params![conversation_id],
            |row| {
                Ok(Conversation {
                    id: row.get(0)?,
                    title: row.get(1)?,
                    model_id: row.get(2)?,
                    created_at: row.get(3)?,
                    updated_at: row.get(4)?,
                })
            },
        )
        .map_err(|e| format!("Conversation not found: {}", e))?
    };
    
    // Get AI model configuration
    // (holds the password-cache lock and the DB lock only within this block)
    let model = {
        let cache = password_cache.0.lock().map_err(|e| e.to_string())?;
        let master_password = cache.as_ref()
            .ok_or_else(|| "未找到主密码，请重新登录".to_string())?;
        
        let conn = db.0.lock().map_err(|e| e.to_string())?;
        
        // Get encrypted model data
        // Tuple layout: (encrypted_api_key, (model_id, model_type,
        // api_endpoint, max_tokens, temperature))
        let (encrypted_key, model_data): (String, (String, String, String, Option<i64>, Option<f64>)) = conn
            .query_row(
                "SELECT encrypted_api_key, model_id, model_type, api_endpoint, max_tokens, temperature 
                 FROM ai_models 
                 WHERE id = ?1",
                params![conversation.model_id],
                |row| {
                    Ok((
                        row.get(0)?,
                        (
                            row.get(1)?,
                            row.get(2)?,
                            row.get(3)?,
                            row.get(4)?,
                            row.get(5)?,
                        )
                    ))
                },
            )
            .map_err(|e| format!("Model not found: {}", e))?;
        
        // Decrypt API key
        let api_key = super::model::decrypt_data(&encrypted_key, master_password)
            .map_err(|e| format!("Failed to decrypt API key: {}", e))?;
        
        // Only the fields needed for the API call are populated; display
        // fields (name, description, timestamps) are left empty/None.
        AiModel {
            id: Some(conversation.model_id),
            name: String::new(),
            model_id: model_data.0,
            model_type: model_data.1,
            api_endpoint: model_data.2,
            api_key,
            is_default: false,
            is_enabled: true,
            max_tokens: model_data.3,
            temperature: model_data.4,
            description: None,
            created_at: None,
            updated_at: None,
        }
    };
    
    // Save user message (before the API call, so it is kept even if the
    // request below fails)
    save_message(db.clone(), conversation_id, "user".to_string(), message.clone()).await?;
    
    // Get conversation history (includes the user message just saved)
    let messages = get_messages(db.clone(), conversation_id).await?;
    
    // Prepare API request: convert stored rows into OpenAI wire format
    let openai_messages: Vec<OpenAIMessage> = messages
        .iter()
        .map(|m| OpenAIMessage {
            role: m.role.clone(),
            content: m.content.clone(),
        })
        .collect();
    
    let request = OpenAIRequest {
        model: model.model_id.clone(),
        messages: openai_messages,
        max_tokens: model.max_tokens,
        temperature: model.temperature,
    };
    
    log::info!("Sending request to AI API: model={}, endpoint={}, messages_count={}", 
               model.model_id, model.api_endpoint, messages.len());
    
    // Call AI API
    let client = reqwest::Client::new();
    let mut request_builder = client
        .post(&model.api_endpoint)
        .header("Authorization", format!("Bearer {}", model.api_key))
        .header("Content-Type", "application/json");
    
    // Add OpenRouter specific headers
    if model.model_type == "openrouter" {
        request_builder = request_builder
            .header("HTTP-Referer", "https://github.com/tauri-apps/tauri")
            .header("X-Title", "ssenx_tool_box");
    }
    
    let response = request_builder
        .json(&request)
        .send()
        .await
        .map_err(|e| format!("Failed to call AI API: {}", e))?;
    
    let status = response.status();
    log::info!("AI API response status: {}", status);
    
    if !status.is_success() {
        // Include the provider's error body in the message surfaced to the UI
        let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string());
        log::error!("AI API error response: {}", error_text);
        return Err(format!("AI API error ({}): {}", status, error_text));
    }
    
    // Get response text for logging
    // (read as text first so the raw body can be included in parse errors)
    let response_text = response
        .text()
        .await
        .map_err(|e| format!("Failed to read response text: {}", e))?;
    
    log::info!("AI API response body: {}", response_text);
    
    // Parse the response
    let ai_response: OpenAIResponse = serde_json::from_str(&response_text)
        .map_err(|e| format!("Failed to parse AI response: {}. Response was: {}", e, response_text))?;
    
    let assistant_message = ai_response
        .choices
        .first()
        .ok_or_else(|| {
            log::error!("AI response has no choices");
            "No response from AI".to_string()
        })?
        .message
        .content
        .clone();
    
    log::info!("AI assistant response: {}", assistant_message);
    
    // Save assistant message
    save_message(db.clone(), conversation_id, "assistant".to_string(), assistant_message.clone()).await?;
    
    log::info!("Message saved to database successfully");
    
    // Check if this is the first message (only 2 messages: user + assistant).
    // `messages` was fetched after saving the user message, so on the first
    // exchange it has length 1; +1 accounts for the assistant reply above.
    let message_count = messages.len() + 1; // +1 for the assistant message we just saved
    if message_count == 2 {
        log::info!("This is the first exchange, generating conversation title...");
        
        // Try to generate a title based on the first user message
        // (best-effort: failure keeps the default title, see Err arm below)
        match generate_conversation_title(&model, &message).await {
            Ok(title) => {
                log::info!("Generated title: {}", title);
                
                // Update conversation title
                let conn = db.0.lock().map_err(|e| e.to_string())?;
                conn.execute(
                    "UPDATE conversations SET title = ?1, updated_at = CURRENT_TIMESTAMP WHERE id = ?2",
                    params![title, conversation_id],
                )
                .map_err(|e| format!("Failed to update conversation title: {}", e))?;
                
                log::info!("Conversation title updated successfully");
            }
            Err(e) => {
                log::warn!("Failed to generate title: {}, keeping default title", e);
            }
        }
    }
    
    Ok(assistant_message)
}

/// Re-export `decrypt_data` so other modules can reach it through this module
/// (within this file it is already called via `super::model::decrypt_data`).
pub use super::model::decrypt_data;
