use std::{
    env,
    fs::{self, File},
    io::{self, BufRead, BufReader, Read},
    path::{self, Path, PathBuf},
};

use clap::{Arg, ArgAction, Command};
use colored::Colorize;
use futures::{future::join_all, Stream, StreamExt, TryStreamExt};
use indicatif::{ProgressBar, ProgressStyle};
use itertools::Itertools;
use language::{
    go_parser::Go_Parser, java_parser::Java_Parser, javascript_parser::JavaScript_Parser,
    rust_parser::Rust_Parser, vue_parser::Vue_Parser, Language_Parser,
};
use log::{debug, info};
use regex::Regex;
use rig::{
    agent::PromptRequest,
    client::{completion::CompletionClientDyn, EmbeddingsClient},
    completion::{Chat, Prompt, PromptError},
    embeddings::EmbeddingsBuilder,
    message::{AssistantContent, Message, UserContent},
    providers::{
        self,
        ollama::{Client, EmbeddingResponse},
    },
    vector_store::{
        request::VectorSearchRequestBuilder, VectorSearchRequest, VectorStoreIndex,
        VectorStoreIndexDyn,
    },
    Embed, OneOrMany,
};
use rig_sqlite::{
    Column, ColumnValue, SqliteVectorIndex, SqliteVectorStore, SqliteVectorStoreTable,
};
use rusqlite::ffi::sqlite3_auto_extension;
use serde::Deserialize;
use sqlite_vec::sqlite3_vec_init;
use tokio::{self};
use tokio_rusqlite::Connection;
use util::{code_completion, interact, property, text_segment::TextSegment, web_spider::WebCrawl};
use walkdir::WalkDir;

use crate::util::{codeStats::stats_code, interact::HandlerInput};

mod language;
mod tool;
mod util;

/// One searchable unit stored in the SQLite vector store: original text
/// (`content`) plus a natural-language summary (`desc`) that is embedded.
#[derive(Embed, Clone, Debug, Deserialize)]
struct Document {
    // Unique key; for code functions it is formatted "relative/path.ext:index".
    id: String,
    // Raw payload (source code, paragraph, …); stored but NOT embedded.
    content: String,
    // Description text; the #[embed] attribute marks it as the vectorized field.
    #[embed]
    desc: String,
}

/// Maps `Document` onto the `documents` table of the SQLite vector store.
impl SqliteVectorStoreTable for Document {
    fn name() -> &'static str {
        "documents"
    }

    fn schema() -> Vec<Column> {
        // Column layout mirrors the struct fields one-to-one.
        [
            ("id", "TEXT PRIMARY KEY"),
            ("content", "TEXT"),
            ("desc", "TEXT"),
        ]
        .into_iter()
        .map(|(name, ty)| Column::new(name, ty))
        .collect()
    }

    fn id(&self) -> String {
        self.id.clone()
    }

    fn column_values(&self) -> Vec<(&'static str, Box<dyn ColumnValue>)> {
        // Values are cloned because the row outlives this borrow of `self`.
        let mut values: Vec<(&'static str, Box<dyn ColumnValue>)> = Vec::with_capacity(3);
        values.push(("id", Box::new(self.id.clone())));
        values.push(("content", Box::new(self.content.clone())));
        values.push(("desc", Box::new(self.desc.clone())));
        values
    }
}

/// Entry point: parses CLI flags and dispatches to the matching sub-task
/// (vectorize, search, summarize, interactive chat, repeated seek,
/// completion server, AI stdin passthrough, or code statistics).
#[tokio::main]
async fn main() {
    // Logging is controlled via RUST_LOG.
    env_logger::init();

    // Environment-based configuration.
    let env_property = property::Env::init();

    let matches = Command::new("rig-search")
        .about("基于LLM解释代码，并向量化搜索的工具")
        .arg(
            Arg::new("vector")
                .short('v')
                .long("vector")
                .value_parser(clap::value_parser!(String))
                .help("LLM解释并向量化存储，选择语言"),
        )
        .arg(
            Arg::new("search")
                .short('s')
                .long("search")
                .value_parser(clap::value_parser!(String))
                .help("在向量库中搜索"),
        )
        .arg(
            Arg::new("seek")
                .short('k')
                .long("seek")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .help("在向量库中多次搜索"),
        )
        .arg(
            Arg::new("llm")
                .short('l')
                .long("llm")
                .value_parser(clap::value_parser!(String))
                .help("在向量库中搜索，并使用大模型归纳"),
        )
        .arg(
            Arg::new("interactive")
                .short('i')
                .long("interactive")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .help("交互式模式"),
        )
        .arg(
            Arg::new("params")
                .short('p')
                .long("params")
                .help("参数，根据任务的不同，可能需要不同的参数"),
        )
        .arg(
            Arg::new("completion_serve")
                .long("completion")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .help("基于向量库提供代码补全服务"),
        )
        .arg(
            Arg::new("ai")
                .long("ai")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .help("使用大模型处理输入流，并输出"),
        )
        .arg(
            Arg::new("stats_code")
                .long("stats")
                .num_args(0)
                .action(ArgAction::SetTrue)
                .help("使用大模型处理输入流，并输出"),
        )
        .get_matches();

    // Project root and vector DB location both derive from the cwd.
    let project_path = env::current_dir().unwrap().display().to_string();
    let vector_db_path = env::current_dir()
        .unwrap()
        .join("vector.db")
        .display()
        .to_string();

    // Build the vector index for the selected language / input kind.
    // Was a chain of `if language == ...` comparisons; a match evaluates
    // the selector once and makes the mutually-exclusive cases explicit.
    if let Some(language) = matches.get_one::<String>("vector") {
        match language.as_str() {
            "java" => {
                let _ = vector_project(&Java_Parser {}, &project_path, &vector_db_path).await;
            }
            "go" => {
                let _ = vector_project(&Go_Parser {}, &project_path, &vector_db_path).await;
            }
            "js" => {
                let _ = vector_project(&JavaScript_Parser {}, &project_path, &vector_db_path).await;
            }
            "rust" => {
                let _ = vector_project(&Rust_Parser {}, &project_path, &vector_db_path).await;
            }
            "vue" => {
                let _ = vector_project(&Vue_Parser {}, &project_path, &vector_db_path).await;
            }
            "json" => {
                // Ingest a pre-built JSON array of documents directly.
                let file_name: String = match matches.get_one::<String>("params") {
                    Some(params) => params.clone(),
                    None => {
                        panic!("分割文本需要传入文件名");
                    }
                };
                let json_file_pathbuf: PathBuf = project_path.clone().into();
                let json_file_path = json_file_pathbuf.join(file_name).display().to_string();
                let _ = vector_json(&json_file_path, &vector_db_path).await;
            }
            "txt" => {
                // Split a plain-text file into segments and embed them.
                let file_name: String = match matches.get_one::<String>("params") {
                    Some(params) => params.clone(),
                    None => {
                        panic!("分割文本需要传入文件名");
                    }
                };
                let _ = vector_txt(&file_name, &vector_db_path).await;
            }
            "url" => {
                // Crawl a web page, segment the text, and embed it.
                let base_url: String = match matches.get_one::<String>("params") {
                    Some(params) => params.clone(),
                    None => {
                        panic!("分割文本需要传入文件名");
                    }
                };
                let _ = vector_web(&base_url, &vector_db_path).await;
            }
            _ => {}
        }
    }

    // Search the vector store and pretty-print the hits.
    if let Some(search_item) = matches.get_one::<String>("search") {
        let search_result: Vec<(f64, String, serde_json::Value)> =
            search(search_item, env_property.search_count, &vector_db_path).await;
        // Compiled once (was rebuilt per hit): pulls the file suffix out of
        // ids shaped like "path.suffix:index" for syntax highlighting.
        let suffix_re = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
        for doc in search_result.iter() {
            let v: &serde_json::Value = &doc.2;

            let file_name_and_func = v["id"].as_str().unwrap_or("");
            let language_suffix = suffix_re
                .captures(file_name_and_func)
                .map(|cap| cap["suffix"].to_string())
                .unwrap_or_default();

            println!(
                "@ {}\n{}\n{}\n",
                file_name_and_func.red(),
                v["desc"].as_str().unwrap_or("").white(),
                util::codehighlight::hightlight(
                    &v["content"].as_str().unwrap_or(""),
                    language_suffix.as_str()
                )
            );
        }
    }

    // Search, print the hits, then have the LLM summarize them.
    if let Some(question) = matches.get_one::<String>("llm") {
        let search_result: Vec<(f64, String, serde_json::Value)> =
            search(question, 15, &vector_db_path).await;

        // Same suffix-extraction regex, compiled once.
        let suffix_re = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
        for doc in search_result.iter() {
            let v: &serde_json::Value = &doc.2;

            let file_name_and_func = v["id"].as_str().unwrap_or("");
            let language_suffix = suffix_re
                .captures(file_name_and_func)
                .map(|cap| cap["suffix"].to_string())
                .unwrap_or_default();

            println!(
                "{}\n{}\n{}\n",
                file_name_and_func.red(),
                v["desc"].as_str().unwrap_or("").white(),
                util::codehighlight::hightlight(
                    &v["content"].as_str().unwrap_or(""),
                    language_suffix.as_str()
                )
            );
        }

        // Summarize the hits with the LLM.
        let llm_summarize = llm_summarize(question, search_result).await;
        println!("# 归纳\n{}", llm_summarize);
    }

    if matches.get_flag("interactive") {
        interactive(&vector_db_path).await;
    }

    if matches.get_flag("seek") {
        seek(10, &vector_db_path).await;
    }

    if matches.get_flag("completion_serve") {
        let _ = code_completion::serve(&vector_db_path).await;
    }

    if matches.get_flag("ai") {
        let _ = ai().await;
    }

    if matches.get_flag("stats_code") {
        // Only these source suffixes are counted.
        let suffix_list: Vec<&str> = vec!["go", "java", "js", "rs"];

        let es: Vec<(String, u64)> = stats_code(project_path.as_str())
            .into_iter()
            .filter(|(suffix, _)| suffix_list.contains(&suffix.as_str()))
            .collect();

        // Total bytes across the counted suffixes.
        let total_size: u64 = es.iter().map(|(_, size)| size).sum();

        println!("total size: {total_size}");

        // Per-suffix size and share of the total.
        for (suffix, size) in es.iter() {
            println!(
                "{}\t{}\t{:.3}%",
                suffix,
                size,
                (*size as f64 / total_size as f64) * 100f64
            );
        }
    }
}

/// Reads all of stdin, sends it to the configured LLM, and prints the
/// trimmed reply to stdout (LLM errors go to stderr without failing).
///
/// # Errors
/// Propagates stdin read failures; previously the `read_to_string` result
/// was ignored, which could silently send a truncated prompt.
async fn ai() -> Result<(), anyhow::Error> {
    let env = property::Env::init();
    let client = providers::openrouter::Client::builder(&env.llm_api_key)
        .base_url(&env.llm_endpoint)
        .build();
    let agent = client.agent(&env.llm_model).temperature(1.0).build();

    // Read the whole input stream; `?` surfaces I/O errors to the caller.
    let mut input = String::new();
    io::stdin().read_to_string(&mut input)?;

    // Ask the model and print the answer.
    let resp: Result<String, rig::completion::PromptError> = agent.chat(&input, vec![]).await;
    if let Ok(msg) = resp.as_ref() {
        println!("{}", msg.trim());
    } else {
        eprintln!("{:?}", resp);
    }

    Ok(())
}

async fn vector_json(json_path: &String, vector_db_path: &String) -> Result<(), anyhow::Error> {
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path).await?;

    // 打开存储库
    let vector_store = SqliteVectorStore::new(conn, &model).await?;

    // 读取json文件
    let mut file = File::open(json_path).unwrap();
    let mut content = String::new();
    file.read_to_string(&mut content);
    let documents: Vec<Document> = serde_json::from_str::<Vec<Document>>(&content).unwrap();

    // 进度条
    let pb = ProgressBar::new(documents.len() as u64);
    let style = ProgressStyle::default_bar()
        // 可选：为进度条添加一些额外的特性，比如估计剩余时间、每秒速度等
        .progress_chars("=>-");
    pb.set_style(style);

    for doc_chunk in documents.chunks(env.code_llm_thread_number) {
        let docs: Vec<Document> = doc_chunk.to_vec();
        pb.inc(docs.len() as u64);
        // 向量化并保存到数据库
        if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
            let resp = embedding.build().await;
            if let Ok(emb) = resp {
                vector_store.add_rows(emb).await;
            } else {
                println!("{:?}", resp);
            }
        }
    }

    pb.finish_with_message("done");

    Ok(())
}

/// Crawls `base_url`, groups the page text into segments, embeds each
/// segment, and stores the vectors in the SQLite store.
///
/// # Errors
/// Propagates DB-open failures; embedding/storage failures per segment are
/// logged and skipped.
async fn vector_web(base_url: &String, vector_db_path: &String) -> Result<(), anyhow::Error> {
    // SAFETY: registers the sqlite-vec extension loader before any SQLite
    // connection is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path).await?;
    // Passed to the crawler's paragraph iterator; 0 appears to mean "no
    // minimum length" — TODO confirm against WebCrawl::paragraph_iter.
    let expect_length = 0;

    // Open (or create) the vector store table.
    let vector_store = SqliteVectorStore::new(conn, &model).await?;

    // Monotonically increasing document id.
    let mut id: usize = 0;
    let mut w = WebCrawl::new(&base_url.to_string());
    let mut text_segment = TextSegment::new(env.part_expect_length);
    for content in w.paragraph_iter(expect_length) {
        info!("{}", content);
        let docs: Vec<Document> = text_segment
            .group(&content)
            .into_iter()
            .map(|para| {
                id += 1;
                Document {
                    id: id.to_string(),
                    content: "".to_string(),
                    desc: para,
                }
            })
            .collect();
        if !docs.is_empty() {
            // Embed and persist; failures are logged, not fatal.
            if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
                match embedding.build().await {
                    Ok(emb) => {
                        if let Err(err) = vector_store.add_rows(emb).await {
                            println!("{:?}", err);
                        }
                    }
                    Err(err) => println!("{:?}", err),
                }
            }
        }
    }

    // Flush the trailing, not-yet-full segment.
    if let Some(last_content) = text_segment.end() {
        // Bug fix: advance the id so the final document does not reuse the
        // previous document's primary key ("id" is TEXT PRIMARY KEY).
        id += 1;
        let docs = vec![Document {
            id: id.to_string(),
            content: last_content.clone(),
            desc: last_content.clone(),
        }];
        if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
            match embedding.build().await {
                Ok(emb) => {
                    if let Err(err) = vector_store.add_rows(emb).await {
                        println!("{:?}", err);
                    }
                }
                Err(err) => println!("{:?}", err),
            }
        }
    }

    Ok(())
}

async fn vector_txt(file_name: &String, vector_db_path: &String) -> Result<(), anyhow::Error> {
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path).await?;
    let expect_length = env.part_expect_length;

    // 打开存储库
    let vector_store = SqliteVectorStore::new(conn, &model).await?;

    // 按行读取文件
    let file: File = File::open(&file_name).expect("文件不存在");
    let metadata = &file.metadata().expect("文件不存在");
    let file_size = metadata.len();
    let reader: BufReader<File> = BufReader::new(file);

    // 进度条(粗略估计)
    let pb = ProgressBar::new(file_size);
    let style = ProgressStyle::default_bar()
        // 可选：为进度条添加一些额外的特性，比如估计剩余时间、每秒速度等
        .progress_chars("=>-");
    pb.set_style(style);

    // todo 写法有点蠢
    let mut id: u64 = 0;
    let mut sentence: String = String::new();
    for line_result in reader.lines() {
        if let Ok(line) = line_result {
            // 进度条
            pb.inc(line.len() as u64 + 1);
            let parts: Vec<&str> = line
                .split(|c| {
                    let separators = [',', '|', ';', '.', ':', '，', '。'];
                    separators.contains(&c)
                })
                .collect();

            for part in parts {
                // todo 这里计算的是字节长度
                if sentence.len() < expect_length {
                    if (part.len() > 0) {
                        sentence.push_str(",");
                        sentence.push_str(part);
                    }
                } else {
                    // todo 批量化
                    // 向量化
                    pb.println(format!("{} {}", sentence.len(), sentence));
                    let docs: Vec<Document> = vec![sentence]
                        .iter()
                        .enumerate()
                        .map(|part| {
                            id += 1;
                            Document {
                                id: id.to_string(),
                                content: part.1.to_string(),
                                desc: part.1.to_string(),
                            }
                        })
                        .collect();

                    // 向量化并保存到数据库
                    if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
                        let resp = embedding.build().await;
                        if let Ok(emb) = resp {
                            vector_store.add_rows(emb).await;
                        } else {
                            println!("{:?}", resp);
                        }
                    }
                    sentence = part.to_string();
                }
            }
        }
    }

    // todo 优化
    if sentence.len() > 0 {
        let docs: Vec<Document> = vec![sentence]
            .iter()
            .enumerate()
            .map(|part| Document {
                id: id.to_string(),
                content: part.1.to_string(),
                desc: part.1.to_string(),
            })
            .collect();

        // 向量化并保存到数据库
        if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
            let resp = embedding.build().await;
            if let Ok(emb) = resp {
                vector_store.add_rows(emb).await;
            } else {
                println!("{:?}", resp);
            }
        }
    }

    pb.finish_with_message("done");

    Ok(())
}

/// Walks the project tree, extracts functions with `language_parser`, asks
/// the code LLM to describe each one, embeds the descriptions, and stores
/// everything in the SQLite vector store.
///
/// # Errors
/// Propagates DB-open failures; per-batch embedding/storage failures are
/// logged and skipped.
async fn vector_project(
    language_parser: &dyn language::Language_Parser,
    project_path: &String,
    vector_db_path: &String,
) -> Result<(), anyhow::Error> {
    // SAFETY: registers the sqlite-vec extension loader before any SQLite
    // connection is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }
    println!(
        "project_path {:?}, vector_db_path {:?}",
        project_path, vector_db_path
    );

    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path).await?;

    // Open (or create) the vector store table.
    let vector_store = SqliteVectorStore::new(conn, &model).await?;

    // LLM agent that writes a short description for each function.
    let client = providers::openrouter::Client::builder(&env.code_llm_api_key)
        .base_url(&env.code_llm_endpoint)
        .build();
    let agent: rig::agent::Agent<rig::client::completion::CompletionModelHandle> = client
        .agent(&env.code_llm_model)
        .temperature(1.0)
        .preamble("根据输入的代码简要输出实现的功能，不要超过220字")
        .build();

    // First pass: count matching files so the progress bar has a length.
    let file_counts = WalkDir::new(project_path)
        .sort_by_file_name()
        .follow_links(true)
        .into_iter()
        .filter(|entry| match entry {
            Ok(path) => language_parser.filter(&path.path().display().to_string()),
            Err(_) => false,
        })
        .count();
    let pb = ProgressBar::new(file_counts as u64);
    // (A formatting template string used to sit here unused; removed.)
    let style = ProgressStyle::default_bar().progress_chars("=>-");
    pb.set_style(style);

    // Second pass: extract functions from each matching file, batched for
    // the LLM by code_llm_thread_number.
    let funcs_chunks = WalkDir::new(project_path)
        .sort_by_file_name()
        .follow_links(true)
        .into_iter()
        .flat_map(|entry| {
            if let Ok(path) = entry {
                let full_path: String = path.path().display().to_string();
                let relative_path: String = full_path
                    .strip_prefix(project_path)
                    .map(|item| item.to_string())
                    .unwrap_or(full_path.clone());
                if language_parser.filter(&full_path) {
                    pb.inc(1);
                    // Parse out functions; drop trivially short ones
                    // (<= 60 bytes) that are not worth describing.
                    let funcs: Vec<String> = language_parser.find_functions(full_path.clone());
                    let origin_len = funcs.len();
                    let valid_funcs: Vec<(String, usize, String)> = funcs
                        .iter()
                        .enumerate()
                        .filter(|(_, func)| func.len() > 60)
                        .map(|(i, func)| (relative_path.clone(), i, func.clone()))
                        .collect();
                    pb.println(format!(
                        "{} {}/{}",
                        relative_path,
                        valid_funcs.len(),
                        origin_len
                    ));
                    return valid_funcs;
                }
            }
            Vec::new()
        })
        .chunks(env.code_llm_thread_number);

    // Describe, embed, and store each batch.
    for funcs_chunk in &funcs_chunks {
        let valid_funcs: Vec<(String, usize, String)> = funcs_chunk.collect();

        // Concurrent LLM descriptions for the whole batch.
        let docs = llm_desc_funcs(valid_funcs, &agent).await;

        for doc in &docs {
            debug!(
                "llm\n{}\n{}\n{}\n",
                doc.id.red(),
                doc.desc.green(),
                doc.content.bright_black()
            )
        }

        // Embed and persist; failures are logged, not fatal.
        if let Ok(embedding) = EmbeddingsBuilder::new(model.clone()).documents(docs) {
            match embedding.build().await {
                Ok(emb) => {
                    if let Err(err) = vector_store.add_rows(emb).await {
                        println!("{:?}", err);
                    }
                }
                Err(err) => println!("{:?}", err),
            }
        }
    }
    pb.finish_with_message("done");

    Ok(())
}

/// Describes a batch of functions concurrently with the LLM, keeping only
/// the ones that were answered successfully.
///
/// Each tuple is (relative file path, function index, function source).
async fn llm_desc_funcs<'a>(
    funcs: Vec<(String, usize, String)>,
    agent: &rig::agent::Agent<rig::client::completion::CompletionModelHandle<'a>>,
) -> Vec<Document> {
    // Build one future per function, then drive them all at once.
    let pending: Vec<_> = funcs
        .into_iter()
        .map(|(path, index, body)| llm_desc(path, index, body, agent))
        .collect();

    join_all(pending)
        .await
        .into_iter()
        .filter_map(Result::ok)
        .collect()
}

/// Asks the LLM to describe one function and packages the answer as a
/// `Document` keyed "full_path:index".
///
/// # Errors
/// Returns the prompt error (after logging it) when the LLM call fails.
async fn llm_desc<'a>(
    full_path: String,
    index: usize,
    func: String,
    agent: &rig::agent::Agent<rig::client::completion::CompletionModelHandle<'a>>,
) -> Result<Document, rig::completion::PromptError> {
    // `func` is already a String — no format!("{}", ..) indirection needed.
    match agent.chat(func.clone(), vec![]).await {
        Ok(desc) => Ok(Document {
            // The id encodes both file and function position for display.
            id: format!("{}:{}", full_path, index),
            content: func,
            desc,
        }),
        Err(err) => {
            println!("{:?}", err);
            Err(err)
        }
    }
}

/// Embeds `search_item`, retrieves the closest documents from the vector
/// store, reranks them by description relevance, and returns the top
/// `number` matches as `(score, id, document-json)` triples.
///
/// Returns an empty list when the database file does not exist yet.
/// Panics when the DB cannot be opened or the query fails (the Vec return
/// type leaves no error channel).
pub async fn search(
    search_item: &String,
    number: usize,
    vector_db_path: &String,
) -> Vec<(f64, String, serde_json::Value)> {
    // SAFETY: registers the sqlite-vec extension loader before any SQLite
    // connection is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    // No database yet: nothing to search.
    let vector_db_path_file = Path::new(vector_db_path);
    if !vector_db_path_file.exists() {
        return vec![];
    }

    // Embedding model used for both the query and the stored vectors.
    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path)
        .await
        .expect("get connect fail");

    let vector_store: SqliteVectorStore<
        providers::ollama::EmbeddingModel<reqwest::Client>,
        Document,
    > = SqliteVectorStore::new(conn, &model)
        .await
        .expect("open sqlite db fail");

    // Over-fetch (10x) so the reranker has candidates to reorder.
    let query_req = VectorSearchRequest::builder()
        .samples(number as u64 * 10)
        .query(search_item)
        .build()
        .unwrap();
    let index = vector_store.index(model);
    let results = index.top_n(query_req).await.expect("get search result fail");

    // Rerank by semantic relevance of the document descriptions.
    let desc_list: Vec<String> = results
        .iter()
        .map(|doc| {
            let v: &serde_json::Value = &doc.2;
            v["desc"].as_str().unwrap_or("").to_string()
        })
        .collect();
    let reranker = util::rerank::Reranker {
        end_point: env.rerank_endpoint.clone(),
    };
    // Both arguments are moved — the previous clones were redundant.
    let reranked: Vec<(f64, String, serde_json::Value)> = reranker
        .rerank(search_item.to_string(), desc_list, results)
        .await;

    // Keep only the requested number of hits.
    reranked.into_iter().take(number).collect()
}

/// Answers `question` with the LLM, feeding the search hits in as prior
/// chat messages so the model can ground its answer in them.
///
/// Returns an empty string when the LLM call fails (callers just print it).
async fn llm_summarize(
    question: &String,
    search_result: Vec<(f64, String, serde_json::Value)>,
) -> String {
    let env: property::Env = property::Env::init();
    let client = providers::openrouter::Client::builder(&env.llm_api_key)
        .base_url(&env.llm_endpoint)
        .build();
    let agent = client
        .agent(&env.llm_model)
        .temperature(0.5)
        // The preamble is a constant — the format!() wrapper was a no-op.
        .preamble("你是代码解析助手，根据提供的资料详细解答问题。依次输出详细解释，并且要有依据。")
        .build();

    // Each hit becomes a user message "desc/content" in the history.
    let mut chat_history: Vec<Message> = search_result
        .iter()
        .map(|res| {
            let v: &serde_json::Value = &res.2;
            let desc = v["desc"].to_string();
            let content = v["content"].to_string();
            Message::User {
                content: OneOrMany::one(UserContent::text(format!("{}/{}", desc, content))),
            }
        })
        .collect();

    // Finally, the actual question.
    chat_history.push(Message::User {
        content: OneOrMany::one(UserContent::text(format!("回答问题:\n{}", question))),
    });

    let res = agent.chat("", chat_history).await;
    // String::default() is "" — same fallback, no allocation on success.
    res.unwrap_or_default()
}

/// Starts the interactive chat loop, wiring the LLM agent up with the
/// RAG-search, Java-parse, and file-grep tools.
async fn interactive(vector_db_path: &String) {
    // Conversation state shared with the input handler.
    let mut chat_history: Vec<Message> = Vec::new();

    let env: property::Env = property::Env::init();
    let llm_client = providers::openrouter::Client::builder(&env.llm_api_key)
        .base_url(&env.llm_endpoint)
        .build();
    let agent = llm_client
        .agent(&env.llm_model)
        .temperature(0.5)
        .tool(tool::RagQuery::SearchCmd {
            number: 5,
            vector_db_path: vector_db_path.clone(),
        })
        .tool(tool::javaParse::JavaParseCmd {
            project_path: ".".to_string(),
        })
        .tool(tool::grep::SearchFileCmd {
            project_path: ".".to_string(),
        })
        .preamble("你是代码解析助手，根据提供的资料详细解答问题。依次输出详细解释，并且要有依据。输出端为终端，不要使用markdown，使用终端能识别的字体格式。")
        .build();

    // Hand the REPL a handler that owns the history and borrows the agent.
    let mut handler = Interact {
        chat_history: &mut chat_history,
        agent: &agent,
    };
    let _ = interact::run_interact(&mut handler).await;
}

/// Interactive-mode input handler: keeps the running chat history and a
/// borrowed LLM agent configured with the RAG tools.
pub struct Interact<'a> {
    // Full conversation so far; grows with each user/assistant turn.
    pub chat_history: &'a mut Vec<Message>,
    // Agent used to answer each input.
    pub agent: &'a rig::agent::Agent<rig::client::completion::CompletionModelHandle<'a>>,
}

impl<'a> HandlerInput<'a> for Interact<'a> {
    /// Appends `input` to the history, asks the agent, records the reply as
    /// an assistant turn, and returns the trimmed reply text (or an error
    /// description on failure).
    async fn handle_input(&mut self, input: &String) -> String {
        self.chat_history.push(Message::User {
            content: OneOrMany::one(UserContent::text(input)),
        });

        // NOTE(review): the history is cloned here, so any messages the
        // PromptRequest appends while running (e.g. tool-call turns) land in
        // the clone and are discarded — only the final reply is kept below.
        // Confirm this is intentional.
        let res: Result<String, PromptError> = PromptRequest::new(self.agent, "")
            .with_history(&mut self.chat_history.clone())
            .await;
        // let res: Result<String, PromptError> = self.agent.chat("", self.chat_history.clone()).await;
        if let Ok(reply) = res {
            // Record the assistant turn so the next round has full context.
            self.chat_history.push(Message::Assistant {
                id: None,
                content: OneOrMany::one(AssistantContent::text(reply.clone())),
            });
            return format!("{}\n", reply.trim());
        } else {
            return format!("返回错误:{:?}\n", res);
        }
    }
}

/// Runs the repeated-search REPL: every line of input triggers a vector
/// search returning up to `number` hits.
async fn seek(number: usize, vector_db_path: &String) {
    // Field-init shorthand; the handler formats hits per input line.
    let mut callback = SeekCallback {
        number,
        vector_db_path,
    };
    let _ = interact::run_interact(&mut callback).await;
}

/// Seek-mode input handler: each input line is searched against the
/// vector store and the formatted hits are returned.
pub struct SeekCallback<'a> {
    // Maximum number of hits to return per query.
    pub number: usize,
    // Location of the SQLite vector database.
    pub vector_db_path: &'a String,
}

impl<'a> HandlerInput<'a> for SeekCallback<'a> {
    /// Searches the vector store for `input` and returns the hits as one
    /// formatted, syntax-highlighted string.
    async fn handle_input(&mut self, input: &String) -> String {
        let search_result: Vec<(f64, String, serde_json::Value)> =
            search(input, self.number, self.vector_db_path).await;

        // Compiled once per call (was recompiled per hit): pulls the file
        // suffix out of ids shaped like "path.suffix:index".
        let suffix_re = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();

        // Format each hit for terminal display.
        let mut result = Vec::new();
        for doc in search_result.iter() {
            let v: &serde_json::Value = &doc.2;

            let file_name_and_func = v["id"].as_str().unwrap_or("");
            // The suffix drives the highlighter's language choice.
            let language_suffix = suffix_re
                .captures(file_name_and_func)
                .map(|cap| cap["suffix"].to_string())
                .unwrap_or_default();

            result.push(format!(
                "@ {}\n{}\n{}\n",
                file_name_and_func.red(),
                v["desc"].as_str().unwrap_or("").white(),
                util::codehighlight::hightlight(
                    &v["content"].as_str().unwrap_or(""),
                    language_suffix.as_str()
                )
            ));
        }

        result.join("\n")
    }
}

/// Smoke test: sends a single greeting to a hosted model and prints the
/// raw response. Requires network access and a valid LLM API key — not a
/// deterministic assertion-based test.
#[tokio::test]
async fn llm_test() {
    let env: property::Env = property::Env::init();
    let client = providers::openai::Client::builder(&env.llm_api_key)
        .base_url("https://api.siliconflow.cn/v1")
        .build();
    let agent = client
        .agent("Pro/Qwen/Qwen2.5-Coder-7B-Instruct")
        .temperature(1.0)
        // .preamble("根据输入的代码输出其功能，简要回答")
        .build();

    let resp: Result<String, rig::completion::PromptError> =
        agent.chat("你好".to_string(), vec![]).await;
    println!("{:?}", resp);
}

/// Manual test: extracts functions from a fixture Java file and prints the
/// LLM's description for each. Requires the fixture file on disk and a
/// reachable code-LLM endpoint; output is inspected by eye, not asserted.
#[tokio::test]
async fn function_desc_test() {
    let funs: Vec<String> =
        Java_Parser {}.find_functions("assets/test_src/AssetShareServiceImpl.java".to_string());

    // LLM agent used to describe each extracted function.
    let env: property::Env = property::Env::init();
    let client = providers::deepseek::Client::builder("")
        .base_url(&env.code_llm_endpoint)
        .build();
    let agent: rig::agent::Agent<rig::client::completion::CompletionModelHandle> = client
        .agent("qwen2.5:7b")
        .temperature(1.0)
        .preamble("根据输入的代码输出其功能，简要回答")
        .build();

    // Describe sequentially and print function/description pairs.
    for func in funs {
        let resp: Result<String, rig::completion::PromptError> =
            agent.chat(format!("{}", func), vec![]).await;
        if let Ok(msg) = resp.as_ref() {
            println!("{}\n{}", func.green(), msg.red())
        }
    }
}

/// Walks a fixed local project tree and prints every `.java` file found.
/// Path is machine-specific; the walk simply yields nothing elsewhere.
#[test]
fn file_scan_test() {
    WalkDir::new("/home/x/code/RuoYi/")
        .into_iter()
        .filter_map(Result::ok)
        .map(|entry| entry.path().display().to_string())
        .filter(|full_path| full_path.ends_with(".java"))
        .for_each(|full_path| println!("{}", full_path));
}

/// Manual test: compares result order before and after reranking for a
/// fixed question. Requires an existing ./vector.db plus running embedding
/// and rerank services; output is inspected by eye.
#[tokio::test]
async fn rerank_test() {
    // SAFETY: registers the sqlite-vec extension loader before any SQLite
    // connection is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    // Fixed query to rerank against.
    let question = "和物联网设备上传设备采集的数据相关的代码逻辑";

    // Embedding model and database connection.
    let env: property::Env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open("vector.db")
        .await
        .expect("get connect fail");

    // Open the vector store.
    let vector_store: SqliteVectorStore<
        providers::ollama::EmbeddingModel<reqwest::Client>,
        Document,
    > = SqliteVectorStore::new(conn, &model)
        .await
        .expect("open sqlite db fail");

    // Retrieve candidates.
    let query_req = VectorSearchRequest::builder()
        .samples(100)
        .query(question)
        .build()
        .unwrap();
    let index = vector_store.index(model);
    let results = index.top_n(query_req).await.expect("get search resul fail");

    // Collect the descriptions to feed the reranker.
    let desc_list: Vec<String> = results
        .iter()
        .map(|doc| {
            let v: &serde_json::Value = &doc.2;
            v["desc"].as_str().unwrap_or("").to_string().clone()
        })
        .collect();
    println!("{}", "排序前".green());
    // NOTE(review): panics if fewer than 5 results come back — assumes a
    // populated vector.db.
    for desc in &desc_list[..5] {
        println!("{}\n", desc,);
    }

    let reranker = util::rerank::Reranker {
        end_point: env.rerank_endpoint.clone(),
    };
    // NOTE(review): desc_list is passed as both the query texts and the
    // payload to reorder — confirm this matches Reranker::rerank's contract.
    let after_rerank = reranker
        .rerank(question.to_string(), desc_list.clone(), desc_list.clone())
        .await;
    println!("{}", "排序后".green());
    for desc in &after_rerank[..5] {
        println!("{}\n", desc,);
    }
}

/// Manual test: runs a real search against ./vector.db and prints the top
/// hits as they would be handed to the LLM. Requires an existing vector.db
/// and a running embedding/rerank service; output is inspected by eye.
#[tokio::test]
async fn search_test() {
    let question = "部署合约";
    // `mut` was unnecessary — the results are only iterated.
    let results: Vec<(f64, String, serde_json::Value)> =
        search(&question.to_string(), 10, &"vector.db".to_string()).await;

    // Print each hit: rank, id, description, and source.
    for (index, doc) in results.iter().enumerate() {
        let v: &serde_json::Value = &doc.2;
        println!(
            "{} {}\n{}\n{}",
            index + 1,
            v["id"].as_str().unwrap_or("").red(),
            v["desc"].as_str().unwrap_or(""),
            v["content"].as_str().unwrap_or("").green()
        );
    }
}

/// Verifies the suffix-extraction regex pulls "java" out of an id of the
/// form "<path>.java:<index>".
///
/// Previously the assertion sat inside `if let Some(..)`, so a regex that
/// failed to match made the test pass silently; it now fails loudly.
#[test]
fn regex_test() {
    let rex = Regex::new(r"^[^.]*\.(?P<suffix>[^.:]*):.*$").unwrap();
    let cap = rex
        .captures("/home/x/code/code-rig/assets/test_src/AssetShareServiceImpl.java:11")
        .expect("id should match the suffix pattern");
    assert_eq!(&cap["suffix"], "java");
}

/// Smoke test: embeds a trivial query and runs top-n against ./vector.db
/// to verify the embedding service and store wiring. The result is not
/// asserted — success means "no panic". Requires vector.db and a running
/// embedding service.
#[tokio::test]
async fn embedding_test() {
    // Fixed query parameters.
    let search_item = "hi";
    let number = 10;
    let vector_db_path = "vector.db";

    // SAFETY: registers the sqlite-vec extension loader before any SQLite
    // connection is opened.
    unsafe {
        sqlite3_auto_extension(Some(std::mem::transmute(sqlite3_vec_init as *const ())));
    }

    // Embedding model and database connection.
    let env = property::Env::init();
    let client = Client::builder().base_url(&env.embedding_endpoint).build();
    let model = client.embedding_model_with_ndims(&env.embedding_model, 1024);
    let conn = Connection::open(vector_db_path)
        .await
        .expect("get connect fail");

    // Open the vector store.
    let vector_store: SqliteVectorStore<
        providers::ollama::EmbeddingModel<reqwest::Client>,
        Document,
    > = SqliteVectorStore::new(conn, &model)
        .await
        .expect("open sqlite db fail");

    // Run the query; the expect() is the only check performed.
    let query_req = VectorSearchRequest::builder()
        .samples(number * 10)
        .query(search_item)
        .build()
        .unwrap();
    let index = vector_store.index(model);
    // NOTE(review): `results` is intentionally unused — this test only
    // checks that the round trip does not error.
    let results = index
        .top_n::<Document>(query_req)
        .await
        .expect("get search resul fail")
}
