use generator_engine::tera::TeraEngine;
use model_graph_common::transport::http::{http_get, http_post};
use model_graph_types::assistant::prompts::PromptTemplateItem;
use model_graph_types::modeling::assistant::{ModelProvider, ModelProviderSetting, ModelType};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

use model_graph_types::{
    container::workflow::{
        LlmStatement, WorkflowBlockExecuteResult, WorkflowBlockExecuteResultBuilder,
        WorkflowBlockExecuteResultStatus,
    },
    generator::CodeGenerator,
    modeling::Value,
};

use model_graph_model::{
    model::{default_model_name, get_model},
    provider::get_provider,
};

use crate::workflow::context::{Getter, WorkflowContext};

/// Executes an LLM workflow statement over HTTP against an
/// OpenAI-compatible chat-completions endpoint.
///
/// Resolves provider/model (statement override first, system default as
/// fallback), renders the prompt templates against the workflow context,
/// posts the request, and returns the joined choice contents under the
/// `"text"` output key.
///
/// # Errors
/// Propagates failures from default/model/provider lookup, prompt
/// rendering, the HTTP call, and response (de)serialization.
pub async fn execute(
    statement: &LlmStatement,
    context: Arc<Mutex<WorkflowContext>>,
) -> anyhow::Result<HashMap<String, Value>> {
    tracing::debug!("LLM HTTP execute.....");
    // System-wide default provider/model, used when the statement omits them.
    let (default_provider_name, default_model_name) = default_model_name(ModelType::LLM).await?;

    let prompts = &statement.prompt_template;
    let model = &statement.model;

    // Provider: the statement's explicit provider wins over the default.
    let provider_name = if !model.provider.is_empty() {
        model.provider.clone()
    } else {
        default_provider_name
    };
    // Model: when a name is given, look it up (which also validates that it
    // exists); otherwise fall back to the default model name.
    let model_name = if let Some(model_name) = &model.name {
        let (model, model_setting) = get_model(model_name).await?;
        tracing::debug!("Model:{:?}|{:?}", model, model_setting);
        model.name.clone()
    } else {
        default_model_name
    };

    tracing::debug!("Provider:{:?},Model:{:?}", provider_name, model_name);
    let (provider, provider_setting) = get_provider(&provider_name).await?;
    tracing::debug!("Provider:{:?}|{:?}", provider, provider_setting);

    // Sampling temperature from the statement's completion params,
    // defaulting to 0.8 when absent.
    let temperature = model
        .completion_params
        .as_ref()
        .and_then(|params| params.temperature)
        .unwrap_or(0.8);

    // Build the chat-completions endpoint URL for this provider.
    let url = _get_url_from_provider(&provider, &provider_setting, &model_name)?;

    let mut headers = HashMap::new();
    headers.insert(
        String::from("Content-Type"),
        String::from("application/json"),
    );

    // Attach the bearer token when the provider config yields one.
    // A missing/unparsable token is tolerated: the request simply goes
    // out without an Authorization header (best-effort by design).
    if let Ok((name, key)) = _get_token_from(&provider, &provider_setting) {
        headers.insert(name, key);
    }

    let body = serde_json::to_string(&serde_json::json!({
        "model": model_name,
        "temperature": temperature,
        "stream": false,
        "messages": _get_prompts(prompts, context.clone()).await?
    }))?;
    // TODO: wire this timeout into the HTTP call below.
    let _timeout = 120000u64;

    tracing::debug!("{}", url);
    tracing::debug!("{:?}", headers);
    tracing::debug!("{}", body);

    // Invoke the provider endpoint.
    let (_, _, response) = http_post(&url, &headers, body.into_bytes()).await?;
    // Lossy decoding: never panic on a non-UTF-8 response body
    // (the previous `from_utf8(...).unwrap()` could), and no clone needed.
    tracing::debug!("返回的数据:{}", String::from_utf8_lossy(&response));

    // Deserialize the OpenAI-style response.
    let res_messages: OpenAIChatResponse = serde_json::from_slice(&response)?;

    // Collect the content of every choice.
    let response_message: Vec<String> = res_messages
        .choices
        .iter()
        .map(|item| item.message.content.clone())
        .collect();

    tracing::debug!("{:?}", response_message);

    let mut outputs: HashMap<String, Value> = HashMap::new();
    outputs.insert(
        String::from("text"),
        Value::String(response_message.join("\n\n")),
    );
    Ok(outputs)
}

/// Builds the `Authorization` header pair (`("Authorization", "Bearer <key>")`)
/// from the provider's stored config.
///
/// For the `"openai"` provider the key is read from an [`OpenAISetting`];
/// every other provider uses the generic [`KeySetting`] shape.
///
/// # Errors
/// Returns an error when `setting.encrypted_config` does not parse as the
/// expected JSON shape.
fn _get_token_from(
    provider: &ModelProvider,
    setting: &ModelProviderSetting,
) -> anyhow::Result<(String, String)> {
    let config_str = &setting.encrypted_config;
    // Pick the API key field according to the provider-specific config shape.
    let api_key = if provider.name == "openai" {
        serde_json::from_str::<OpenAISetting>(config_str)?.openai_api_key
    } else {
        serde_json::from_str::<KeySetting>(config_str)?.api_key
    };
    Ok((String::from("Authorization"), format!("Bearer {}", api_key)))
}

/// Derives the chat-completions endpoint URL from the provider's stored
/// config: `"<openai_api_base>/v1/chat/completions"` for the `"openai"`
/// provider, `"<url_base>/chat/completions"` otherwise.
///
/// NOTE(review): `model_name` is accepted but never used here — confirm
/// whether some provider requires it in the URL, otherwise it can go.
///
/// # Errors
/// Returns an error when `setting.encrypted_config` does not parse as the
/// expected JSON shape.
fn _get_url_from_provider(
    provider: &ModelProvider,
    setting: &ModelProviderSetting,
    model_name: &String,
) -> anyhow::Result<String> {
    let config_str = &setting.encrypted_config;
    let url = if provider.name == "openai" {
        let cfg: OpenAISetting = serde_json::from_str(config_str)?;
        format!("{}/v1/chat/completions", cfg.openai_api_base)
    } else {
        let cfg: KeySetting = serde_json::from_str(config_str)?;
        format!("{}/chat/completions", cfg.url_base)
    };
    Ok(url)
}

/// Renders the statement's prompt templates into OpenAI chat messages
/// (`{"role": ..., "content": ...}` JSON objects).
///
/// Templates containing `{{` are first rewritten from the `{{#...#}}`
/// placeholder syntax into Tera's `{{N...}}` form and rendered against the
/// workflow context's node values; plain texts are passed through as-is.
///
/// # Errors
/// Propagates failures from reading the context values and from template
/// rendering.
async fn _get_prompts(
    prompt_template: &Vec<PromptTemplateItem>,
    context: Arc<Mutex<WorkflowContext>>,
) -> anyhow::Result<Vec<serde_json::Value>> {
    let engine = TeraEngine::default();
    // Workflow node values, exposed to templates under the "N" prefix.
    let arguments = Value::Object(context.get_map_values(Some(String::from("N")))?);

    let mut messages: Vec<serde_json::Value> = Vec::with_capacity(prompt_template.len());
    for item in prompt_template {
        let content = if item.text.contains("{{") {
            // Translate the custom placeholder delimiters, then render.
            let rewritten = item.text.replace("{{#", "{{N").replace("#}}", "}}");
            engine.simple(arguments.clone(), &rewritten).await?
        } else {
            item.text.clone()
        };
        messages.push(serde_json::json!({
            "role": item.role,
            "content": content
        }));
    }
    Ok(messages)
}

/// A single message inside an OpenAI chat-completion choice.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
struct OpenAIChoiceMessage {
    /// Message role (e.g. as returned by the API); optional in the payload.
    pub role: Option<String>,
    /// The message text. NOTE(review): deserialization fails if the API
    /// returns `null` content (e.g. tool-call responses) — confirm.
    pub content: String,
}

/// One completion choice from an OpenAI-style chat response.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)]
struct OpenAIChoice {
    /// Position of this choice in the response list, when provided.
    pub index: Option<i32>,
    /// The generated message for this choice.
    pub message: OpenAIChoiceMessage,
    /// Why generation stopped (e.g. provider-defined), when provided.
    pub finish_reason: Option<String>,
}

/// Minimal projection of an OpenAI chat-completions response: only the
/// `choices` array is needed by `execute`; all other fields are ignored.
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)]
struct OpenAIChatResponse {
    pub choices: Vec<OpenAIChoice>,
}

/// Stored configuration shape for the `"openai"` provider, parsed from
/// `ModelProviderSetting::encrypted_config`.
///
/// NOTE(review): all three fields are required during deserialization
/// (no `#[serde(default)]`); a stored config missing
/// `openai_organization` will fail to parse — confirm this is intended.
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)]
struct OpenAISetting {
    pub openai_api_key: String,
    pub openai_api_base: String,
    pub openai_organization: String,
}

/// Generic provider configuration shape (non-OpenAI providers), parsed
/// from `ModelProviderSetting::encrypted_config`.
#[derive(Debug, Default, Serialize, Deserialize, Clone, PartialEq)]
struct KeySetting {
    pub api_key: String,
    pub url_base: String,
}
