use askama::Template;
use colored::*;
use rig::{
    completion::Chat,
    message::{AssistantContent, Message, UserContent},
    providers::{self},
    OneOrMany,
};
use serde::{Deserialize, Serialize};
use std::env;
mod tool;

/// Name/description pair for one callable tool, rendered into the prompt
/// template. Serialized with camelCase keys (e.g. for JSON tool manifests).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ToolDesc {
    pub name: String,
    pub description: String,
}

/// Askama prompt template backed by `templates/qwen2-5.txt`: a system prompt
/// plus the list of tools the model may call.
#[derive(Template)]
#[template(path = "qwen2-5.txt")]
struct LlmPrompt {
    system: String,
    tools: Vec<ToolDesc>,
}

/// Entry point: builds a tool-equipped agent over a fixed project directory
/// and runs an explore-and-answer loop until the model emits `FinalAnswer`
/// (or 100 rounds elapse).
fn main() {
    // Project the agent explores and the question it must answer.
    let pwd: String = "/home/x/company/跨链/波卡链/heima/".to_string();
    let goal = "用户是发起跨链交易时如何签名的？";
    println!("pwd: {}", pwd);

    // Missing env var falls back to "" — unwrap_or_default avoids allocating
    // the fallback eagerly on the happy path.
    let code_llm_api_key = env::var("CODE_LLM_API_KEY").unwrap_or_default();

    let client = providers::deepseek::Client::from_url(
        code_llm_api_key.as_str(),
        "https://api.siliconflow.cn/v1",
    );
    let deepseek_agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> = client
        .agent("Qwen/Qwen3-32B")
        .temperature(1.0)
        .tool(tool::ls::LsCmd { project_path: pwd.clone() })
        .tool(tool::cat::CatCmd { project_path: pwd.clone() })
        .tool(tool::grep::GrepCmd { project_path: pwd.clone() })
        .preamble(
            format!(
                "
你是代码助手，请你解决：{}。围绕这一主题自行探索。
当前处于项目目录，使用相对路径访问项目下的文件。多利用工具探索项目，可以执行多轮,依据上下文回答，有理有据.
如果根据上下文能确定答案，先输出FinalAnswer，表示这是最后一次输出，然后换行输出答案，格式示例如下:
```
FinalAnswer
回答
```
",
                goal
            )
            .as_str(),
        )
        .build();
    let mut chat_memory = ChatMemory::new(deepseek_agent);

    // Agent loop: each reply is fed back as the next user turn, capped at
    // 100 rounds so a model that never converges cannot spin forever.
    let mut next_chat = String::new();
    for round in 0..100 {
        println!("round {}", round);
        let llm_result = chat_memory.call(&next_chat);
        // Plain clone — no formatting machinery needed to copy a String.
        next_chat = llm_result.clone();
        println!("{}", llm_result.green());

        // The model signals it is done by emitting the FinalAnswer marker;
        // everything after the last marker is the answer text.
        if llm_result.contains("FinalAnswer") {
            let final_answer = llm_result.split("FinalAnswer").last();
            println!("{}", final_answer.unwrap_or("not found").red());
            return;
        }
    }
}

// Memory module: pairs an agent with the accumulated conversation turns so
// each call can replay the full history to the model.
struct ChatMemory {
    // The configured deepseek-backed agent used for every completion call.
    chat_agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel>,
    // Conversation so far; grown by `call` with each user turn.
    chat_history: Vec<Message>,
}

impl ChatMemory {
    /// Wraps `chat_agent` with an initially empty conversation history.
    fn new(
        chat_agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel>,
    ) -> ChatMemory {
        ChatMemory {
            chat_agent,
            chat_history: Vec::new(),
        }
    }

    /// Records `text` as a user turn, sends the whole history to the model,
    /// and returns the assistant's reply — or "" if the call failed (the
    /// error is printed, not propagated).
    ///
    /// NOTE(review): the assistant reply is intentionally NOT pushed back
    /// into `chat_history`; the caller re-submits it as the next user turn.
    fn call(&mut self, text: &str) -> String {
        self.chat_history.push(Message::User {
            content: OneOrMany::one(UserContent::text(text)),
        });

        // A fresh runtime per call is wasteful but tolerable at this call
        // frequency; storing one runtime on the struct would be the upgrade.
        let rt = tokio::runtime::Runtime::new().unwrap();
        let resp: Result<String, rig::completion::PromptError> =
            rt.block_on(self.chat_agent.chat("", self.chat_history.clone()));

        match resp {
            Ok(result) => result,
            Err(msg) => {
                println!("{}", msg);
                String::new()
            }
        }
    }
}

/// Manual integration test against a local Ollama-compatible endpoint:
/// builds a tool-equipped agent over a local project and prints one reply.
/// Requires a server on localhost:11434; not a hermetic unit test.
#[test]
fn test_llm() {
    let client = providers::deepseek::Client::from_url("", "http://localhost:11434/v1");
    // `format!` with no arguments was pointless — the literal is already &str.
    let deepseek_agent_pre = client
        .agent("qwen2.5:14b-instruct")
        .temperature(0.1)
        .max_tokens(512)
        .preamble(
            "
你是代码助手，请你解决：ruoyi-system模块提供了哪些接口，输出所有的接口。
当前处于项目目录，使用相对路径访问项目下的文件，尽量多利用工具探索项目：

根据上下文如果能确定答案，按下面的格式输出结果
```
Thought: 我现在知道最终答案
Final Answer: Question的最终答案，详细输出
```

现在开始回答，在给出最终答案前多按照指定格式进行一步一步的推理。
        ",
        );

    let deepseek_agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> =
        deepseek_agent_pre
            .tool(tool::tree::TreeCmd {
                project_path: "/home/x/projects/ruoyi".to_string(),
            })
            .tool(tool::ls::LsCmd {
                project_path: "/home/x/projects/ruoyi".to_string(),
            })
            .tool(tool::cat::CatCmd {
                project_path: "/home/x/projects/ruoyi".to_string(),
            })
            .build();

    let rt = tokio::runtime::Runtime::new().unwrap();
    let resp: Result<String, rig::completion::PromptError> =
        rt.block_on(deepseek_agent.chat("", Vec::new()));
    println!("resp:\n{:?}", resp);
}
