// use std::error::Error;
use serde::{Deserialize, Serialize};
use reqwest::Client;
use tauri::command;
use serde_json::Value;

/// One chat message in the OpenAI-compatible wire format.
#[derive(Debug, Serialize, Deserialize)]
struct ChatMessage {
    /// Author of the message — presumably "system" / "user" / "assistant";
    /// not validated anywhere in this file.
    role: String,
    /// The message text.
    content: String,
}

/// Chat-completion request body, deserialized from the frontend's JSON
/// options string and re-serialized as the outgoing API payload.
#[derive(Debug, Serialize, Deserialize)]
struct ChatRequest {
    /// Model identifier (also used for provider detection in `call_ai_api`).
    model: String,
    /// Conversation history, oldest first.
    messages: Vec<ChatMessage>,
    /// Sampling temperature forwarded verbatim to the API.
    temperature: f32,
    /// Completion-length cap forwarded verbatim to the API.
    max_tokens: u32,
}

/// Subset of the OpenAI chat-completion response this app cares about.
/// All fields are `Option` so that both success bodies (id/choices) and
/// error bodies (error) deserialize into the same type.
#[derive(Debug, Serialize, Deserialize)]
struct OpenAIResponse {
    id: Option<String>,
    /// Present on success; checked for emptiness before use.
    choices: Option<Vec<ChatChoice>>,
    /// Present when the API rejected the request.
    error: Option<OpenAIError>,
}

/// A single completion candidate from the `choices` array.
#[derive(Debug, Serialize, Deserialize)]
struct ChatChoice {
    /// The assistant's reply for this candidate.
    message: ChatMessage,
    /// Why generation stopped (e.g. "stop", "length"); optional per the API.
    finish_reason: Option<String>,
    /// Position of this candidate in the `choices` array.
    index: u32,
}

/// Error payload returned by the OpenAI API inside the `error` field.
#[derive(Debug, Serialize, Deserialize)]
struct OpenAIError {
    /// Human-readable description; the only part surfaced to the frontend.
    message: String,
    // `type` is a Rust keyword, hence the raw identifier.
    r#type: Option<String>,
    code: Option<String>,
}

/// Uniform result envelope sent back to the frontend: failures are reported
/// in-band (`success: false` + `error`) rather than as a rejected promise.
#[derive(Debug, Serialize, Deserialize)]
struct APIResponse {
    success: bool,
    /// Assistant reply text when `success` is true.
    content: Option<String>,
    /// Error description when `success` is false.
    error: Option<String>,
}

/// Tauri command: send a chat-completion request to the OpenAI API.
///
/// `api_key` is the caller's OpenAI key; `request_options` is a JSON string
/// matching [`ChatRequest`]. Every failure mode (bad input JSON, transport
/// error, API-reported error, empty choices) is returned in-band as an
/// `APIResponse` with `success: false`, so the frontend always receives a
/// structured value — the `Result`'s `Err` arm is never used here.
#[command]
async fn call_openai_api(api_key: String, request_options: String) -> Result<APIResponse, String> {
    // Parse the caller-supplied options JSON into a typed request.
    let request_options: ChatRequest = match serde_json::from_str(&request_options) {
        Ok(options) => options,
        Err(e) => {
            return Ok(APIResponse {
                success: false,
                content: None,
                error: Some(format!("解析请求选项失败: {}", e)),
            });
        }
    };

    // Build a one-off HTTP client and POST with Bearer auth.
    let client = Client::new();
    let res = client
        .post("https://api.openai.com/v1/chat/completions")
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key))
        .json(&request_options)
        .send()
        .await;

    match res {
        Ok(response) => {
            // NOTE(review): the HTTP status is not inspected before parsing;
            // a non-JSON error body (e.g. from a gateway) will surface as a
            // parse failure instead of the server's own message.
            match response.json::<OpenAIResponse>().await {
                Ok(openai_response) => {
                    // The API reports request failures via an `error` object.
                    if let Some(error) = openai_response.error {
                        return Ok(APIResponse {
                            success: false,
                            content: None,
                            error: Some(format!("OpenAI API错误: {}", error.message)),
                        });
                    }

                    // Move the first choice's content out by value — no clone
                    // needed. `None` covers both a missing and an empty
                    // `choices` array.
                    match openai_response.choices.and_then(|c| c.into_iter().next()) {
                        Some(choice) => Ok(APIResponse {
                            success: true,
                            content: Some(choice.message.content),
                            error: None,
                        }),
                        None => Ok(APIResponse {
                            success: false,
                            content: None,
                            error: Some("API响应中没有回复内容".to_string()),
                        }),
                    }
                }
                Err(e) => Ok(APIResponse {
                    success: false,
                    content: None,
                    error: Some(format!("解析OpenAI响应失败: {}", e)),
                }),
            }
        }
        Err(e) => Ok(APIResponse {
            success: false,
            content: None,
            error: Some(format!("请求OpenAI API失败: {}", e)),
        }),
    }
}

// Multi-provider variant implemented directly in lib.rs (no longer delegated
// from main.rs).
/// Tauri command: route a chat request to one of several providers based on
/// the model name, and return the provider's raw JSON response.
///
/// `api_key` must match the detected provider. `request_options` is a JSON
/// string matching [`ChatRequest`]. Unlike `call_openai_api`, failures are
/// returned as `Err(String)`.
#[command]
async fn call_ai_api(api_key: String, request_options: String) -> Result<Value, String> {
    // Parse the JSON options into a typed request.
    let chat_request: ChatRequest = serde_json::from_str(&request_options)
        .map_err(|e| format!("解析请求选项失败: {}", e))?;

    let model = chat_request.model.as_str();
    println!("Processing request for model: {}", model);

    // Infer the provider from substrings of the model name; unrecognized
    // models fall back to OpenAI.
    let provider = if model.contains("gpt") {
        "openai"
    } else if model.contains("deepseek") {
        "deepseek"
    } else if model.contains("gemini") {
        "gemini"
    } else if model.contains("claude") {
        "anthropic"
    } else {
        "openai" // default
    };

    println!("Detected provider: {} for model: {}", provider, model);

    // OpenAI-compatible header set, shared by openai / deepseek / fallback.
    let bearer_headers = |key: &str| {
        vec![
            ("Content-Type".to_string(), "application/json".to_string()),
            ("Authorization".to_string(), format!("Bearer {}", key)),
        ]
    };

    // Pick endpoint and headers per provider.
    let (api_endpoint, headers) = match provider {
        "deepseek" => (
            "https://api.deepseek.com/v1/chat/completions".to_string(),
            bearer_headers(&api_key),
        ),
        "gemini" => (
            // NOTE(review): Gemini's generateContent endpoint expects a
            // `contents` body and `?key=` query auth, not the OpenAI chat
            // schema + Bearer header sent below — this branch likely fails
            // against the real API. Preserved as-is; confirm before fixing.
            format!(
                "https://generativelanguage.googleapis.com/v1/models/{}:generateContent",
                model
            ),
            bearer_headers(&api_key),
        ),
        "anthropic" => (
            "https://api.anthropic.com/v1/messages".to_string(),
            vec![
                ("Content-Type".to_string(), "application/json".to_string()),
                ("x-api-key".to_string(), api_key),
                ("anthropic-version".to_string(), "2023-06-01".to_string()),
            ],
        ),
        // "openai" and any unknown provider share the same configuration;
        // the previous separate "openai" arm duplicated this fallback.
        _ => (
            "https://api.openai.com/v1/chat/completions".to_string(),
            bearer_headers(&api_key),
        ),
    };

    // Assemble the request with the provider-specific headers.
    let client = reqwest::Client::new();
    let mut request_builder = client.post(&api_endpoint);
    for (key, value) in headers {
        request_builder = request_builder.header(key, value);
    }

    // Re-serialize the validated request as the outgoing body.
    let request_body = serde_json::json!({
        "model": model,
        "messages": chat_request.messages,
        "temperature": chat_request.temperature,
        "max_tokens": chat_request.max_tokens
    });

    println!("Sending request with model: {}", model);

    // Send and surface transport errors.
    let response = request_builder
        .json(&request_body)
        .send()
        .await
        .map_err(|e| format!("请求失败: {}", e))?;

    // Surface non-2xx responses with the raw body for easier debugging.
    if !response.status().is_success() {
        let status = response.status();
        let error_text = response
            .text()
            .await
            .unwrap_or_else(|_| "无法获取错误信息".into());
        return Err(format!("API返回错误 ({}): {}", status, error_text));
    }

    // Hand the provider's JSON back to the frontend untouched.
    response
        .json::<Value>()
        .await
        .map_err(|e| format!("解析响应失败: {}", e))
}

/// Application entry point: assembles the Tauri builder with logging (debug
/// builds only), the two AI command handlers, and the persistent store
/// plugin, then runs the event loop.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
  let builder = tauri::Builder::default()
    .setup(|app| {
      // Register the log plugin only when compiled with debug assertions.
      if cfg!(debug_assertions) {
        let log_plugin = tauri_plugin_log::Builder::default()
          .level(log::LevelFilter::Info)
          .build();
        app.handle().plugin(log_plugin)?;
      }
      Ok(())
    })
    .invoke_handler(tauri::generate_handler![call_openai_api, call_ai_api])
    .plugin(tauri_plugin_store::Builder::default().build());

  builder
    .run(tauri::generate_context!())
    .expect("error while running tauri application");
}
