use std::time::Duration;

use async_trait::async_trait;
use reqwest::Client;
use reqwest::header::HeaderMap;
use serde::{Deserialize, Serialize};

use crate::{dao, runtime, state};
use crate::commands::commands::Command;
use crate::models::models;
use crate::models::models::command::CommandState;
use crate::models::models::commands::command_ai_config::{CommandAiConfig, Service};

// Aggregated model reply: generated text plus token usage.
// NOTE(review): not referenced anywhere in this file's visible code — confirm whether it
// is used elsewhere (e.g. emitted to the frontend) or is dead and removable.
#[derive(Debug, Default, Clone, Serialize)]
struct Response {
    // Generated answer text.
    content: String,
    // Tokens consumed by the prompt.
    input_tokens: i32,
    // Tokens produced by the model.
    output_tokens: i32,
}

/// Stateless `ai` command; all configuration is loaded from settings on each execution.
#[derive(Debug, Default, Clone)]
pub(crate) struct AiCommand {}

#[async_trait]
impl Command for AiCommand {
    /// Dispatches an AI command to the configured service.
    ///
    /// Returns immediately with a "processing" placeholder output; for AliYun the
    /// actual model call runs on a background tokio task which streams results to
    /// the frontend via `save_and_send_message`. Missing configuration or an
    /// unsupported service is reported as a failed command.
    async fn execute(&self, mut command: models::command::Command) -> models::command::Command {
        command.output = "数据处理中...".to_string();

        // Missing configuration is a user-fixable error: report it instead of proceeding.
        let config = match dao::setting::get_setting::<CommandAiConfig>("command.ai.config") {
            Some(config) => config,
            None => return self.save_command_output(command, CommandState::Failed, "AI命令缺少配置参数，请通过setting命令打开设置面板完成配置后重新执行".to_string()),
        };

        match config.service {
            Service::AliYun => {
                // Run in the background: the placeholder command is returned to the
                // caller right away while the model streams its answer asynchronously.
                runtime::tokio::runtime().spawn(AiCommand::ali_yun(command.clone(), config));
            }
            Service::Tencent => {
                return self.save_command_output(command, CommandState::Failed, "暂未支持腾讯云".to_string());
            }
        }

        command
    }
}

impl AiCommand {
    /// 保存消息并将消息发送给前端
    fn save_and_send_message(mut command: models::command::Command, state: CommandState, message: &str, append: bool, input_tokens: i32, output_tokens: i32) {
        command.state = state;
        command.output = if append { format!("{}{}", command.output, message) } else { message.to_string() };
        command.statistic = format!("{{\"input_tokens\":{}, \"output_tokens\":{}}}", input_tokens, output_tokens);

        let result = dao::command::update_command_history(&command);
        if result.is_err() {
            log::error!("更新历史命令失败: {}", result.unwrap_err().to_string());
        }

        // 发送给窗口
        let window = state::application::window();
        if window.is_some() {
            let window = window.unwrap();
            let result = window.emit(format!("ai-message-{}", command.command_id.as_str()).as_str(), command);
            if result.is_err() {
                log::error!("发送消息失败: {}", result.unwrap_err().to_string());
            }
        }
    }
}

/// A single chat message in the DashScope conversation (`role` + `content`).
#[derive(Debug, Deserialize, Clone, Default, Serialize)]
struct AliYunMessage {
    // Message role: the code uses "system", "user" and "assistant".
    role: String,
    // Message text.
    content: String,
}

/// `input` section of the DashScope request body: the full message history.
#[derive(Debug, Default, Clone, Serialize)]
struct AliYunApiBodyInput {
    messages: Vec<AliYunMessage>,
}

/// `parameters` section of the DashScope request body.
#[derive(Debug, Default, Clone, Serialize)]
struct AliYunApiBodyParameters {
    // Response format; this file always sets "message".
    result_format: String,
    // Whether the model may augment answers with web search.
    enable_search: bool,
}

/// Top-level DashScope text-generation request body.
#[derive(Debug, Serialize, Clone, Default)]
struct AliYunApiRequest {
    // Model identifier taken from the user's AliYun config.
    model: String,
    input: AliYunApiBodyInput,
    parameters: AliYunApiBodyParameters,
}

/// Token accounting returned by DashScope in each SSE event.
#[derive(Debug, Deserialize, Clone, Default)]
struct AliYunApiResponseUsage {
    input_tokens: i32,
    output_tokens: i32,
}

/// Message payload inside a response choice; only the text content is consumed.
#[derive(Debug, Deserialize, Clone, Default)]
struct AliYunApiResponseOutputChoiceMessage {
    content: String,
}

/// One generation candidate in the response.
#[derive(Debug, Deserialize, Clone, Default)]
struct AliYunApiResponseOutputChoice {
    message: AliYunApiResponseOutputChoiceMessage,
    // "null" while the model is still streaming; any other value ends the stream loop.
    finish_reason: String,
}

/// `output` section of the response; only `choices[0]` is used by this file.
#[derive(Debug, Deserialize, Clone, Default)]
struct AliYunApiResponseOutput {
    choices: Vec<AliYunApiResponseOutputChoice>,
}

/// Top-level DashScope response, parsed from the `data:` line of each SSE event.
#[derive(Debug, Deserialize, Clone, Default)]
struct AliYunApiResponse {
    usage: AliYunApiResponseUsage,
    output: AliYunApiResponseOutput,
}

impl AiCommand {
    /// 阿里云服务
    async fn ali_yun(command: models::command::Command, config: CommandAiConfig) {
        let ali_yun_config = config.ali_yun_api_config.clone().unwrap();
        if ali_yun_config.token.is_empty() {
            AiCommand::save_and_send_message(command, CommandState::Failed, "未配置灵积API鉴权Token", false, 0, 0);

            return;
        }

        let api: &str = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation";

        let mut headers = HeaderMap::new();
        headers.insert("Content-Type", "application/json".parse().unwrap());
        headers.insert("Authorization", format!("Bearer {}", ali_yun_config.token).parse().unwrap());
        headers.insert("X-DashScope-SSE", "enable".parse().unwrap());

        let mut body = AliYunApiRequest::default();
        body.model = ali_yun_config.model.to_string();

        // 如果携带上下文从数据库查询AI命令历史，否则使用最后一条
        let mut count = 1;
        if config.post_context {
            count = config.post_context_cnt;
        }

        let messages = dao::command::get_last_ai_commands(count);
        if messages.is_err() {
            AiCommand::save_and_send_message(command, CommandState::Failed, messages.unwrap_err().to_string().as_str(), false, 0, 0);

            return;
        }

        body.input.messages = Vec::new();

        let messages = messages.unwrap();
        for index in 0..messages.len() {
            let args = messages[index].args.clone();
            let tokens = args.split(" ").collect::<Vec<&str>>();
            let question = tokens[1..tokens.len()].join(" ").to_string();

            if 0 == index {
                body.input.messages.push(AliYunMessage {
                    role: "user".to_string(),
                    content: question,
                });

                continue;
            }

            body.input.messages.push(AliYunMessage {
                role: "assistant".to_string(),
                content: messages[index].output.clone(),
            });
            body.input.messages.push(AliYunMessage {
                role: "user".to_string(),
                content: question,
            });
        }

        // 追加系统默认会话
        body.input.messages.push(AliYunMessage {
            role: "system".to_string(),
            content: "DEFAULT".to_string(),
        });

        body.input.messages.reverse();
        body.parameters.result_format = "message".to_string();
        body.parameters.enable_search = ali_yun_config.enable_search;

        let body = serde_json::to_string(&body).unwrap();

        log::debug!("headers: {:?} body: {:?}", headers.clone(), body.clone());
        let result = Client::new().post(api).body(body).headers(headers).timeout(Duration::from_secs(120)).send().await;
        if result.is_err() {
            AiCommand::save_and_send_message(command, CommandState::Failed, format!("请求灵积语言模型失败: {}", result.unwrap_err().to_string()).as_str(), false, 0, 0);

            return;
        }

        let mut response = result.unwrap();
        loop {
            let bytes = response.chunk().await;
            if bytes.is_err() {
                AiCommand::save_and_send_message(command, CommandState::Failed, format!("获取灵积语言模型响应失败: {}", bytes.unwrap_err().to_string()).as_str(), true, 0, 0);

                return;
            }

            let bytes = bytes.unwrap();
            if bytes.is_none() {
                return;
            }

            let bytes = bytes.unwrap();
            let text = String::from_utf8_lossy(bytes.iter().as_slice()).to_string();

            // 处理消息
            let tokens = text.trim().split("\n").collect::<Vec<&str>>();
            if 4 != tokens.len() || !tokens[3].starts_with("data:") {
                log::error!("解析灵积模型响应失败: {:?}", text);
                AiCommand::save_and_send_message(command.clone(), CommandState::Failed, format!("解析灵积模型响应失败: {:?}", text).as_str(), true, 0, 0);

                continue;
            }

            let text = tokens[3].strip_prefix("data:").unwrap();
            let message = serde_json::from_str::<AliYunApiResponse>(text);
            if message.is_err() {
                log::error!("获取灵积语言模型响应失败: {:?} {}", text, message.unwrap_err().to_string());
                AiCommand::save_and_send_message(command.clone(), CommandState::Failed, format!("获取灵积语言模型响应失败: {:?}", text).as_str(), true, 0, 0);

                continue;
            }

            let message = message.unwrap();
            if 0 == message.output.choices.len() {
                log::error!("解析HTTP响应失败: AI模型未返回响应内容: {:?}", text);
                AiCommand::save_and_send_message(command.clone(), CommandState::Failed, format!("解析HTTP响应失败: AI模型未返回响应内容: {:?}", text).as_str(), true, 0, 0);

                continue;
            }

            AiCommand::save_and_send_message(command.clone(), CommandState::Success, message.output.choices[0].message.content.as_str(), false, message.usage.input_tokens, message.usage.output_tokens);

            if "null" != message.output.choices[0].finish_reason {
                return;
            }
        }
    }
}