use std::{
    env,
    io::{self, Read},
    iter,
};

use clap::{Arg, Command};
use colored::Colorize;
use futures::future::join_all;
use itertools::Itertools;
use language::{
    go_parser::Go_Parser, java_parser::Java_Parser, javascript_parser::JavaScript_Parser,
    rust_parser::Rust_Parser, vue_parser::Vue_Parser, Language_Parser,
};
use log::debug;
use regex::Regex;
use rig::{
    completion::Chat,
    embeddings::EmbeddingsBuilder,
    message::{AssistantContent, Message, UserContent},
    providers::{self, ollama::Client},
    vector_store::VectorStoreIndexDyn,
    Embed, OneOrMany,
};
use rig_sqlite::{Column, ColumnValue, SqliteVectorStore, SqliteVectorStoreTable};
use rusqlite::ffi::sqlite3_auto_extension;
use serde::Deserialize;
use sqlite_vec::sqlite3_vec_init;
use tokio;
use tokio_rusqlite::Connection;
use util::property;
use walkdir::WalkDir;

mod language;
mod tool;
mod util;

/// One stored code snippet: a function's source plus its LLM-generated
/// description. Only `desc` carries `#[embed]`, so similarity search runs
/// over the natural-language description rather than the raw code.
#[derive(Embed, Clone, Debug, Deserialize)]
struct Document {
    // "<file path>:<function index>" — produced by `llm_desc`.
    id: String,
    // Raw source code of the function.
    content: String,
    // LLM-generated summary of the function; the embedded field.
    #[embed]
    desc: String,
}

impl SqliteVectorStoreTable for Document {
    /// SQLite table that backs the vector store.
    fn name() -> &'static str {
        "documents"
    }

    /// Column layout: text primary key plus the two text payload columns.
    fn schema() -> Vec<Column> {
        let mut columns = Vec::with_capacity(3);
        columns.push(Column::new("id", "TEXT PRIMARY KEY"));
        columns.push(Column::new("content", "TEXT"));
        columns.push(Column::new("desc", "TEXT"));
        columns
    }

    /// Row identifier — mirrors the `id` field.
    fn id(&self) -> String {
        self.id.clone()
    }

    /// Values persisted for each column when a row is written.
    fn column_values(&self) -> Vec<(&'static str, Box<dyn ColumnValue>)> {
        let id: Box<dyn ColumnValue> = Box::new(self.id.clone());
        let content: Box<dyn ColumnValue> = Box::new(self.content.clone());
        let desc: Box<dyn ColumnValue> = Box::new(self.desc.clone());
        vec![("id", id), ("content", content), ("desc", desc)]
    }
}

fn main() {
    // 日志初始化
    env_logger::init();

    // 环境变量
    let env_property = property::Env::init();

    let matches = Command::new("rig-search")
        .about("基于LLM解释代码，并向量化搜索的工具")
        .arg(
            Arg::new("vector")
                .short('v')
                .long("vector")
                .value_parser(clap::value_parser!(String))
                .help("LLM解释并向量化存储，选择语言"),
        )
        .arg(
            Arg::new("search")
                .short('s')
                .long("search")
                .value_parser(clap::value_parser!(String))
                .help("在向量库中搜索"),
        )
        .arg(
            Arg::new("llm")
                .short('l')
                .long("llm")
                .value_parser(clap::value_parser!(String))
                .help("在向量库中搜索，并使用大模型归纳"),
        )
        .arg(
            Arg::new("interactive")
                .short('i')
                .long("interactive")
                .num_args(1)
                .help("交互式模式"),
        )
        .get_matches();

    let project_path = env::current_dir().unwrap().display().to_string();
    let vector_db_path = env::current_dir()
        .unwrap()
        .join("vector.db")
        .display()
        .to_string();

    // 建立索引
    if let Some(language) = matches.get_one::<String>("vector") {
        // 阻塞运行
        let rt = tokio::runtime::Runtime::new().unwrap();
        if language == "java" {
            rt.block_on(vector_project(
                &Java_Parser {},
                &project_path,
                &vector_db_path,
            ));
        }
        if language == "go" {
            rt.block_on(vector_project(
                &Go_Parser {},
                &project_path,
                &vector_db_path,
            ));
        }
        if language == "js" {
            rt.block_on(vector_project(
                &JavaScript_Parser {},
                &project_path,
                &vector_db_path,
            ));
        }
        if language == "rust" {
            rt.block_on(vector_project(
                &Rust_Parser {},
                &project_path,
                &vector_db_path,
            ));
        }
        if language == "vue" {
            rt.block_on(vector_project(
                &Vue_Parser {},
                &project_path,
                &vector_db_path,
            ));
        }
    }

    // 搜索代码
    if let Some(search_item) = matches.get_one::<String>("search") {
        let searh_result: Vec<(f64, String, serde_json::Value)> =
            search(search_item, env_property.search_count, &vector_db_path);
        // 打印搜索结果
        for (index, doc) in searh_result.iter().enumerate() {
            let v: &serde_json::Value = &doc.2;

            // 参数
            let file_name_and_func = v["id"].as_str().unwrap_or("");
            let mut language_suffix: String = "".to_string();

            // 获取文件后缀
            let rex = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
            if let Some(cap) = rex.captures(file_name_and_func) {
                language_suffix = cap["suffix"].to_string();
            }

            println!(
                "@ {}\n{}\n{}\n",
                file_name_and_func.red(),
                v["desc"].as_str().unwrap_or("").white(),
                util::codehighlight::hightlight(
                    &v["content"].as_str().unwrap_or(""),
                    language_suffix.as_str()
                )
            );
        }
    }

    // 搜索并归纳
    if let Some(question) = matches.get_one::<String>("llm") {
        let searh_result: Vec<(f64, String, serde_json::Value)> =
            search(question, 15, &vector_db_path);

        // 打印搜索结果
        for (index, doc) in searh_result.iter().enumerate() {
            let v: &serde_json::Value = &doc.2;

            // 参数
            let file_name_and_func = v["id"].as_str().unwrap_or("");
            let mut language_suffix: String = "".to_string();

            // 获取文件后缀
            let rex = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
            if let Some(cap) = rex.captures(file_name_and_func) {
                language_suffix = cap["suffix"].to_string();
            }

            println!(
                "{}\n{}\n{}\n",
                file_name_and_func.red(),
                v["desc"].as_str().unwrap_or("").white(),
                util::codehighlight::hightlight(
                    &v["content"].as_str().unwrap_or(""),
                    language_suffix.as_str()
                )
            );
        }

        // llm处理
        let llm_summarize = llm_summarize(question, searh_result);
        println!("# 归纳\n{}", llm_summarize.red());
    }
}

async fn vector_project(
    language_parser: &dyn language::Language_Parser,
    project_path: &String,
    vector_db_path: &String,
) -> Result<(), anyhow::Error> {
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }
    println!(
        "project_path {:?}, vector_db_path {:?}",
        project_path, vector_db_path
    );

    let env = property::Env::init();
    let client = Client::from_url(&env.embedding_endpoint);
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path).await?;

    // 打开存储库
    let vector_store: SqliteVectorStore<rig::providers::ollama::EmbeddingModel, Document> =
        SqliteVectorStore::new(conn, &model).await?;

    // llm解析函数
    let client =
        providers::deepseek::Client::from_url(&env.code_llm_api_key, &env.code_llm_endpoint);
    let agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> = client
        .agent(&env.code_llm_model)
        .temperature(1.0)
        .preamble("根据输入的代码简要输出实现的功能，不要超过220字")
        .build();

    // 遍历文件夹
    let funcs_chunks = WalkDir::new(project_path)
        .sort_by_file_name()
        .into_iter()
        .flat_map(|entry| {
            if let Ok(path) = entry {
                let full_path = path.path().display().to_string();
                // todo
                if language_parser.filter(&full_path) {
                    // 解析文档
                    let funcs: Vec<String> = language_parser.find_functions(full_path.clone());
                    let origin_len = funcs.len();
                    let valid_funcs: Vec<(String, usize, String)> = funcs
                        .iter()
                        .enumerate()
                        .into_iter()
                        .filter(|func| func.1.len() > 60)
                        .map(|func| (full_path.clone(), func.0, func.1.clone()))
                        .collect();
                    println!("{} {}/{}", full_path, valid_funcs.len(), origin_len);
                    return valid_funcs;
                }
            }
            Vec::new()
        })
        .chunks(env.code_llm_thread_number);

    // 遍历方法
    for funcs_chunk in &funcs_chunks {
        let valid_funcs: Vec<(String, usize, String)> = funcs_chunk.collect();

        // llm批量处理
        let docs = llm_desc_funcs(valid_funcs, &agent).await;

        // 打印llm处理结果
        for doc in &docs {
            debug!(
                "llm\n{}\n{}\n{}\n",
                doc.id.red(),
                doc.desc.green(),
                doc.content.bright_black()
            )
        }

        // 向量化并保存到数据库
        if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
            if let Ok(emb) = embedding.build().await {
                vector_store.add_rows(emb).await;
            }
        }
    }

    Ok(())
}

/// Describes every function in `funcs` concurrently via `llm_desc` and keeps
/// only the successful results (failed items are dropped).
async fn llm_desc_funcs(
    funcs: Vec<(String, usize, String)>,
    agent: &rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel>,
) -> Vec<Document> {
    // Build all futures first, then await them together.
    let futures: Vec<_> = funcs
        .into_iter()
        .map(|(full_path, index, func)| llm_desc(full_path, index, func, agent))
        .collect();

    join_all(futures)
        .await
        .into_iter()
        .filter_map(Result::ok)
        .collect()
}

/// Asks the agent to describe `func` and packages the reply as a `Document`
/// whose id is "<full_path>:<index>".
///
/// Returns the chat error unchanged on failure so the caller can filter.
async fn llm_desc(
    full_path: String,
    index: usize,
    func: String,
    agent: &rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel>,
) -> Result<Document, rig::completion::PromptError> {
    // The function source itself is the prompt; no chat history is needed.
    // (Dropped the no-op `format!("{}", func)` wrapper.)
    let resp: Result<String, rig::completion::PromptError> =
        agent.chat(func.clone(), vec![]).await;
    match resp {
        Ok(desc) => Ok(Document {
            id: format!("{}:{}", full_path, index),
            content: func,
            desc,
        }),
        Err(err) => {
            // Surface the failure; the caller silently drops failed items.
            println!("{:?}", err);
            Err(err)
        }
    }
}

/// Embeds `search_item`, pulls `number * 10` candidates from the vector
/// store, reranks them by description, and returns the best `number` hits
/// as (score, id, document-json) triples.
fn search(
    search_item: &String,
    number: usize,
    vector_db_path: &String,
) -> Vec<(f64, String, serde_json::Value)> {
    // SAFETY: registers the sqlite-vec extension loader before the
    // connection below is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    // Dedicated runtime: this fn is called from synchronous contexts.
    let rt = tokio::runtime::Runtime::new().unwrap();

    // 大模型运行
    let env = property::Env::init();
    let client = Client::from_url(&env.embedding_endpoint);
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = rt
        .block_on(Connection::open(vector_db_path))
        .expect("get connect fail");

    // 打开存储库
    let vector_store: SqliteVectorStore<providers::ollama::EmbeddingModel, Document> = rt
        .block_on(SqliteVectorStore::new(conn, &model))
        .expect("open sqlite db fail");

    // 搜索 — over-fetch 10x so the reranker has candidates to choose from.
    let index = vector_store.index(model);
    let results = rt
        .block_on(index.top_n(search_item, number * 10))
        .expect("get search result fail");

    // reranker重排序 — rank by the LLM-generated descriptions.
    let desc_list: Vec<String> = results
        .iter()
        .map(|doc| doc.2["desc"].as_str().unwrap_or("").to_string())
        .collect();
    let reranker = util::rerank::Reranker {
        end_point: env.rerank_endpoint.clone(),
    };
    // Pass ownership — neither list is used afterwards, so the previous
    // `.clone()`s (and the needless `mut`) were pure waste.
    let reranked =
        rt.block_on(reranker.rerank(search_item.to_string(), desc_list, results));
    reranked.into_iter().take(number).collect()
}

/// Sends the search hits plus `question` to the chat LLM and returns its
/// free-text summary; returns "" if the model call fails.
fn llm_summarize(
    question: &String,
    search_result: Vec<(f64, String, serde_json::Value)>,
) -> String {
    // Dedicated runtime: called from synchronous contexts.
    let rt = tokio::runtime::Runtime::new().unwrap();

    let env: property::Env = property::Env::init();
    let client = providers::deepseek::Client::from_url(&env.llm_api_key, &env.llm_endpoint);
    let agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> = client
        .agent(&env.llm_model)
        .temperature(0.5)
        // Literal preamble — the original wrapped it in a no-op `format!`.
        .preamble(
            "你是代码解析助手，根据提供的资料详细解答问题。依次输出详细解释，并且要有依据。",
        )
        .build();

    // 构建历史: each hit becomes one user message "desc/content" so the
    // model can ground its answer in the retrieved code.
    let mut chat_history: Vec<Message> = search_result
        .iter()
        .map(|res| {
            let v: &serde_json::Value = &res.2;
            let desc = v["desc"].to_string();
            let content = v["content"].to_string();
            Message::User {
                content: OneOrMany::one(UserContent::text(format!("{}/{}", desc, content))),
            }
        })
        .collect();

    // The actual question goes last.
    chat_history.push(Message::User {
        content: OneOrMany::one(UserContent::text(format!("回答问题:\n{}", question))),
    });

    rt.block_on(agent.chat("", chat_history))
        .unwrap_or_default()
}

/// Interactive REPL: reads a line from stdin, sends the accumulated history
/// to the LLM (which may invoke the vector-search tool), prints the reply,
/// and records it in the history. Exits on stdin EOF or read error.
fn interactive(vector_db_path: &String) {
    // Runtime, env, client and agent are built once — the original rebuilt
    // all four on every loop iteration.
    let rt = tokio::runtime::Runtime::new().unwrap();
    let env: property::Env = property::Env::init();
    let client = providers::deepseek::Client::from_url(&env.llm_api_key, &env.llm_endpoint);
    let agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> = client
        .agent(&env.llm_model)
        .temperature(0.5)
        .preamble(
            "你是代码解析助手，根据提供的资料详细解答问题。依次输出详细解释，并且要有依据。你也可以通过使用tool得到更多信息",
        )
        .tool(tool::search::SearchCmd {
            number: 5,
            vector_db_path: vector_db_path.clone(),
        })
        .build();

    let mut chat_history: Vec<Message> = Vec::new();
    loop {
        // 输入 — the Result was ignored before, so EOF spun the loop
        // forever on an empty string; now it terminates cleanly.
        let mut input = String::new();
        match io::stdin().read_line(&mut input) {
            Ok(0) | Err(_) => return,
            Ok(_) => {}
        }

        chat_history.push(Message::User {
            content: OneOrMany::one(UserContent::text(input)),
        });

        // llm 回答
        if let Ok(reply) = rt.block_on(agent.chat("", chat_history.clone())) {
            println!("{}", reply);
            chat_history.push(Message::Assistant {
                content: OneOrMany::one(AssistantContent::text(reply)),
            });
        }
    }
}

/// Manual smoke test: sends one greeting to a remote OpenAI-compatible
/// endpoint and prints the raw response (requires network + env vars).
#[tokio::test]
async fn llm_test() {
    let env: property::Env = property::Env::init();
    let client =
        providers::openai::Client::from_url(&env.llm_api_key, "https://api.siliconflow.cn/v1");
    let agent = client
        .agent("Pro/Qwen/Qwen2.5-Coder-7B-Instruct")
        .temperature(1.0)
        .build();

    let reply: Result<String, rig::completion::PromptError> =
        agent.chat("你好".to_string(), vec![]).await;
    println!("{:?}", reply);
}

/// Manual test: parses the sample Java file and prints an LLM-generated
/// description for every extracted function (requires a local LLM).
#[test]
fn function_desc_test() {
    let functions: Vec<String> =
        Java_Parser {}.find_functions("assets/test_src/AssetShareServiceImpl.java".to_string());

    // llm解析函数
    let env: property::Env = property::Env::init();
    let client = providers::deepseek::Client::from_url("", &env.code_llm_endpoint);
    let agent: rig::agent::Agent<providers::deepseek::DeepSeekCompletionModel> = client
        .agent("qwen2.5:7b")
        .temperature(1.0)
        .preamble("根据输入的代码输出其功能，简要回答")
        .build();

    let rt = tokio::runtime::Runtime::new().unwrap();
    for source in functions {
        let response: Result<String, rig::completion::PromptError> =
            rt.block_on(agent.chat(source.clone(), vec![]));
        if let Ok(description) = response.as_ref() {
            println!("{}\n{}", source.green(), description.red())
        }
    }
}

/// Manual test: lists every .java file under a hard-coded project path.
#[test]
fn file_scan_test() {
    let java_files = WalkDir::new("/home/x/code/RuoYi/")
        .into_iter()
        .filter_map(Result::ok)
        .map(|entry| entry.path().display().to_string())
        .filter(|full_path| full_path.ends_with(".java"));
    for full_path in java_files {
        println!("{}", full_path);
    }
}

/// Manual test: raw vector search for a fixed question, printing the top 5
/// descriptions before and after reranking (needs vector.db + endpoints).
#[test]
fn rerank_test() {
    // SAFETY: registers the sqlite-vec extension loader before the
    // connection below is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    // 参数
    let question = "和物联网设备上传设备采集的数据相关的代码逻辑";

    // 阻塞运行
    let rt = tokio::runtime::Runtime::new().unwrap();

    // 嵌入式模型
    let env: property::Env = property::Env::init();
    let client = Client::from_url(&env.embedding_endpoint);
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = rt
        .block_on(Connection::open("vector.db"))
        .expect("get connect fail");

    // 打开存储库
    let vector_store: SqliteVectorStore<providers::ollama::EmbeddingModel, Document> = rt
        .block_on(SqliteVectorStore::new(conn, &model))
        .expect("open sqlite db fail");

    // 搜索
    let search_index = vector_store.index(model);
    let hits = rt
        .block_on(search_index.top_n(question, 100))
        .expect("get search resul fail");

    // 打印结果
    let desc_list: Vec<String> = hits
        .iter()
        .map(|hit| hit.2["desc"].as_str().unwrap_or("").to_string())
        .collect();
    println!("{}", "排序前".green());
    for desc in &desc_list[..5] {
        println!("{}\n", desc);
    }

    let reranker = util::rerank::Reranker {
        end_point: env.rerank_endpoint.clone(),
    };
    let after_rerank =
        rt.block_on(reranker.rerank(question.to_string(), desc_list.clone(), desc_list.clone()));
    println!("{}", "排序后".green());
    for desc in &after_rerank[..5] {
        println!("{}\n", desc);
    }
}

/// Manual end-to-end test: search vector.db for a query, print the hits,
/// then ask the LLM for a summary (needs network + vector.db).
#[test]
fn search_test() {
    let question = "部署合约";
    // `mut` removed — the hit list is never modified.
    let results: Vec<(f64, String, serde_json::Value)> =
        search(&question.to_string(), 10, &"vector.db".to_string());

    // 打印提供给llm的参考
    for (index, doc) in results.iter().enumerate() {
        let v: &serde_json::Value = &doc.2;
        println!(
            "{} {}\n{}\n{}",
            index + 1,
            v["id"].as_str().unwrap_or("").red(),
            v["desc"].as_str().unwrap_or(""),
            v["content"].as_str().unwrap_or("").green()
        );
    }

    // llm处理 — NOTE(review): summarizes with the literal prompt "查询"
    // instead of `question`; preserved as-is but worth confirming intent.
    // (Local renamed so it no longer shadows the `llm_summarize` fn.)
    let summary = llm_summarize(&"查询".to_string(), results);
    println!("{}", summary.red());
}

/// Verifies the suffix-extraction regex used when printing search hits:
/// it must pull "java" out of "<path>.java:<line>".
#[test]
fn regex_test() {
    let rex = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
    // The original wrapped this in `if let Some(...)`, so a non-matching
    // regex made the test pass vacuously; a non-match now fails it.
    let cap = rex
        .captures("/home/x/code/code-rig/assets/test_src/AssetShareServiceImpl.java:11")
        .expect("id should match the suffix regex");
    assert_eq!(&cap["suffix"], "java");
}
