use sqlx::query_as;
use std::sync::Arc;
use tantivy::schema::{IndexRecordOption, TextFieldIndexing, TextOptions, STRING};
use tantivy::{
    directory::MmapDirectory,
    doc,
    schema::{Field, Schema, FAST, STORED, TEXT},
    tokenizer::{StopWordFilter, TextAnalyzer},
    Index, IndexReader, IndexWriter, Opstamp,
};
use tokio::{
    fs,
    sync::{OnceCell, RwLock},
};
use tracing::{info, warn};

use crate::entities::{files::File, papers::Paper};
use crate::utils::rand_utils::calculate_hash;
use crate::{
    app_writer::AppResult,
    config::{self, CFG},
};
use crate::{
    db::db_get,
    utils::file_utils::{extract_string, get_file_path},
};
/// Marker trait for the per-index bundle of resolved tantivy schema fields
/// (implemented by `FieldPaper` and `FieldFile`).
pub trait FieldTrait {}

/// Common behavior for a tantivy-backed search engine: accessors for the
/// index/reader/writer plus commit, rollback, and (re)build operations.
pub trait SearchEngine<T: FieldTrait> {
    /// Construct an engine from an already-built index, reader and shared writer.
    fn new(index: Index, index_reader: IndexReader, index_writer: Arc<RwLock<IndexWriter>>)
        -> Self;
    /// Borrow the underlying tantivy index.
    fn get_index(&self) -> &Index;
    /// Borrow the reader used for searches.
    fn get_index_reader(&self) -> &IndexReader;
    /// Clone the shared handle to the single index writer.
    fn get_index_writer(&self) -> Arc<RwLock<IndexWriter>>;
    /// The internal u64 document-id field registered as "_ids" in the schema.
    fn get_ids_field(&self) -> Field {
        self.get_index().schema().get_field("_ids").unwrap()
    }
    /// Resolved schema fields specific to this engine.
    fn get_fields(&self) -> T;

    /// Commit all pending writes, making them visible to readers.
    async fn commit(&self) -> AppResult<Opstamp> {
        let writer = self.get_index_writer();
        let mut guard = writer.write().await;
        Ok(guard.commit()?)
    }

    /// Discard all uncommitted writes.
    #[allow(dead_code)]
    async fn rollback(&self) -> AppResult<Opstamp> {
        let writer = self.get_index_writer();
        let mut guard = writer.write().await;
        Ok(guard.rollback()?)
    }

    /// Rebuild the index from the database.
    async fn build_index(&self) -> AppResult<()>;
}

/// Build a search engine: schema (one stored id field, an internal "_ids"
/// fast field, jieba-tokenized text fields), index (on disk or in RAM),
/// writer, reader, and the jieba tokenizer with configured stop words.
async fn build_search_engine<S, T>(
    cfg: &config::SearchEngine,
    stored_field: &str,
    text_fields: Vec<&str>,
) -> S
where
    S: SearchEngine<T>,
    T: FieldTrait,
{
    // Schema: exact-match stored id, fast u64 internal id, full-text fields.
    let mut schema_builder = Schema::builder();
    let _ = schema_builder.add_text_field(stored_field, STORED | STRING);
    let _ = schema_builder.add_u64_field("_ids", FAST);
    for text_field in text_fields {
        let text_field_indexing = TextFieldIndexing::default()
            .set_tokenizer("jieba")
            .set_index_option(IndexRecordOption::WithFreqsAndPositions);
        let text_options = TextOptions::default().set_indexing_options(text_field_indexing);
        let _ = schema_builder.add_text_field(text_field, text_options);
    }
    let schema = schema_builder.build();

    // Use an on-disk index when a directory is configured, RAM otherwise.
    let index = if let Some(dir) = &cfg.directory {
        let dir = MmapDirectory::open(dir).unwrap();
        Index::open_or_create(dir, schema).unwrap()
    } else {
        Index::create_in_ram(schema)
    };
    let index_writer: IndexWriter = index
        .writer(cfg.memory_buffer_size)
        .expect("Failed to create index writer");
    let index_writer = Arc::new(RwLock::new(index_writer));
    let index_reader = index
        .reader_builder()
        .reload_policy(tantivy::ReloadPolicy::OnCommitWithDelay)
        .try_into()
        .expect("Failed to create index reader");

    // Load stop words (one per line) from every configured file.
    let mut stopwords = Vec::new();
    if let Some(stopword_files) = &cfg.stop_word_file {
        stopwords.reserve(1000);
        for path in stopword_files {
            info!("loading stopwords from {}", path);
            let content = fs::read_to_string(path).await.unwrap();
            // BUG FIX: the original built a lazy `.map()` iterator and never
            // consumed it, so its closure never ran and no stop word was ever
            // collected. Consume the lines eagerly instead.
            stopwords.extend(
                content
                    .lines()
                    .map(str::trim)
                    .filter(|word| !word.is_empty())
                    .map(str::to_string),
            );
        }
    }

    // Register the jieba tokenizer (with stop-word filtering) under the name
    // referenced by the text-field indexing options above.
    let jieba = tantivy_jieba::JiebaTokenizer {};
    let tokenizer = TextAnalyzer::builder(jieba)
        .filter(StopWordFilter::remove(stopwords))
        .build();
    index.tokenizers().register("jieba", tokenizer);

    S::new(index, index_reader, index_writer)
}

/// Full-text search engine over the `papers` table.
pub struct SearchEnginePaper {
    index: Index,
    index_reader: IndexReader,
    // Shared writer: tantivy allows only one IndexWriter per index.
    index_writer: Arc<RwLock<IndexWriter>>,
}
/// Full-text search engine over the contents of uploaded files.
pub struct SearchEngineFile {
    index: Index,
    index_reader: IndexReader,
    // Shared writer: tantivy allows only one IndexWriter per index.
    index_writer: Arc<RwLock<IndexWriter>>,
}

impl SearchEngine<FieldPaper> for SearchEnginePaper {
    fn new(
        index: Index,
        index_reader: IndexReader,
        index_writer: Arc<RwLock<IndexWriter>>,
    ) -> Self {
        Self {
            index,
            index_reader,
            index_writer,
        }
    }
    fn get_index(&self) -> &Index {
        &self.index
    }

    fn get_index_reader(&self) -> &IndexReader {
        &self.index_reader
    }

    fn get_index_writer(&self) -> Arc<RwLock<IndexWriter>> {
        self.index_writer.clone()
    }

    /// Rebuild the paper index from the `papers` table: wipe all documents,
    /// re-add every row, then commit on success or roll back on failure.
    async fn build_index(&self) -> AppResult<()> {
        let db = db_get()?;
        let papers = query_as!(
            Paper,
            r#"
            SELECT id, title, author, abstract as abstract_
            FROM papers;
            "#
        )
        .fetch_all(db)
        .await?;

        let fields = self.get_fields();
        let index_writer = self.get_index_writer();
        // BUG FIX: previously the indexing loop lived directly in a `match`
        // scrutinee block, where `?` returned from `build_index` itself — the
        // rollback arm was unreachable. Running it inside an async block
        // captures the error so the rollback below actually happens.
        let result: AppResult<()> = async {
            let index_writer = index_writer.read().await;
            let _ = index_writer.delete_all_documents()?;
            let ids_field = self.get_ids_field();
            for paper in papers {
                // Internal u64 id derived from the paper id string.
                let ids = calculate_hash(paper.id.as_bytes());
                let doc = doc!(
                    fields.paper_id => paper.id,
                    ids_field => ids,
                    fields.title => paper.title,
                    fields.author => paper.author,
                    fields.abstract_ => paper.abstract_
                );
                let _ = index_writer.add_document(doc)?;
            }
            Ok(())
        }
        .await;
        // The read guard is dropped before commit/rollback take the write lock.
        match result {
            Ok(()) => {
                let _ = self.commit().await?;
                Ok(())
            }
            Err(e) => {
                let _ = self.rollback().await?;
                Err(e)
            }
        }
    }

    /// Resolve (and cache) the schema fields used by the paper index.
    fn get_fields(&self) -> FieldPaper {
        static FIELDS: OnceCell<FieldPaper> = OnceCell::const_new();
        if let Some(fields) = FIELDS.get() {
            *fields
        } else {
            let schema = self.get_index().schema();
            let paper_id = schema.get_field("paper_id").unwrap();
            let title = schema.get_field("title").unwrap();
            let author = schema.get_field("author").unwrap();
            let abstract_ = schema.get_field("abstract").unwrap();
            let fields = FieldPaper {
                paper_id,
                title,
                author,
                abstract_,
            };
            // BUG FIX: `set(...).unwrap()` could panic when two tasks raced
            // past the `get()` above. Both compute identical values, so the
            // losing `set` can be ignored safely.
            let _ = FIELDS.set(fields);
            fields
        }
    }
}

impl SearchEngine<FieldFile> for SearchEngineFile {
    fn new(
        index: Index,
        index_reader: IndexReader,
        index_writer: Arc<RwLock<IndexWriter>>,
    ) -> Self {
        Self {
            index,
            index_reader,
            index_writer,
        }
    }
    fn get_index(&self) -> &Index {
        &self.index
    }

    fn get_index_reader(&self) -> &IndexReader {
        &self.index_reader
    }

    fn get_index_writer(&self) -> Arc<RwLock<IndexWriter>> {
        self.index_writer.clone()
    }

    /// Rebuild the file index: wipe all documents, extract text from every
    /// file on disk (skipping unreadable/empty ones), then commit on success
    /// or roll back on failure.
    async fn build_index(&self) -> AppResult<()> {
        let db = db_get()?;
        let files = query_as!(
            File,
            r#"SELECT file_hash, file_type
            FROM files"#
        )
        .fetch_all(db)
        .await?;

        let fields = self.get_fields();
        let index_writer = self.get_index_writer();
        // BUG FIX: previously the indexing loop lived directly in a `match`
        // scrutinee block, where `?` returned from `build_index` itself — the
        // rollback arm was unreachable. Running it inside an async block
        // captures the error so the rollback below actually happens.
        let result: AppResult<()> = async {
            let index_writer = index_writer.read().await;
            let _ = index_writer.delete_all_documents()?;
            let ids_field = self.get_ids_field();
            for file in files {
                let path = get_file_path(&file.file_hash);
                // Skip files that are missing or unreadable — best-effort index.
                let data = match tokio::fs::read(&path).await {
                    Ok(data) => data,
                    Err(_) => {
                        warn!("Failed to read file: {}", path);
                        continue;
                    }
                };
                // Skip files whose text extraction fails or yields nothing.
                let text = match extract_string(&data, &file.file_type) {
                    Ok(text) if text.is_empty() => {
                        warn!("File is empty: {}", path);
                        continue;
                    }
                    Ok(text) => text,
                    Err(e) => {
                        warn!("Failed to extract text from {}: {}", path, e);
                        continue;
                    }
                };
                // Internal u64 id derived from the file hash string.
                let ids = calculate_hash(file.file_hash.as_bytes());
                let doc = doc!(
                    fields.file_hash => file.file_hash,
                    ids_field => ids,
                    fields.content => text
                );
                let _ = index_writer.add_document(doc)?;
            }
            Ok(())
        }
        .await;
        // The read guard is dropped before commit/rollback take the write lock.
        match result {
            Ok(()) => {
                let _ = self.commit().await?;
                Ok(())
            }
            Err(e) => {
                let _ = self.rollback().await?;
                Err(e)
            }
        }
    }

    /// Resolve (and cache) the schema fields used by the file index.
    fn get_fields(&self) -> FieldFile {
        static FIELDS: OnceCell<FieldFile> = OnceCell::const_new();
        if let Some(fields) = FIELDS.get() {
            *fields
        } else {
            let schema = self.get_index().schema();
            let file_hash = schema.get_field("file_hash").unwrap();
            let content = schema.get_field("content").unwrap();
            let fields = FieldFile { file_hash, content };
            // BUG FIX: `set(...).unwrap()` could panic when two tasks raced
            // past the `get()` above. Both compute identical values, so the
            // losing `set` can be ignored safely.
            let _ = FIELDS.set(fields);
            fields
        }
    }
}

/// Global paper search engine; populated once by `init_search_engine`.
pub static SEARCH_ENGINE_PAPER: OnceCell<SearchEnginePaper> = OnceCell::const_new();
/// Global file search engine; populated once by `init_search_engine`.
pub static SEARCH_ENGINE_FILE: OnceCell<SearchEngineFile> = OnceCell::const_new();

/// Return the global paper search engine, or an error when
/// `init_search_engine` has not run yet.
pub async fn get_search_engine_paper() -> AppResult<&'static SearchEnginePaper> {
    if let Some(engine) = SEARCH_ENGINE_PAPER.get() {
        Ok(engine)
    } else {
        Err(anyhow::anyhow!("Search engine for paper not initialized").into())
    }
}

/// Return the global file search engine, or an error when
/// `init_search_engine` has not run yet.
pub async fn get_search_engine_file() -> AppResult<&'static SearchEngineFile> {
    if let Some(engine) = SEARCH_ENGINE_FILE.get() {
        Ok(engine)
    } else {
        Err(anyhow::anyhow!("Search engine for file not initialized").into())
    }
}

/// Resolved tantivy `Field` handles for the paper index schema.
#[derive(Debug, Clone, Copy)]
pub struct FieldPaper {
    pub paper_id: Field,
    pub title: Field,
    pub author: Field,
    // Trailing underscore because `abstract` is a reserved word in Rust;
    // the schema field itself is named "abstract".
    pub abstract_: Field,
}
impl FieldTrait for FieldPaper {}

/// Resolved tantivy `Field` handles for the file index schema.
#[derive(Debug, Clone, Copy)]
pub struct FieldFile {
    pub file_hash: Field,
    pub content: Field,
}
impl FieldTrait for FieldFile {}

/// Initialize both global search engines concurrently; each `OnceCell` is
/// filled at most once.
pub async fn init_search_engine() {
    let paper_init = SEARCH_ENGINE_PAPER.get_or_init(|| async {
        info!("Initializing search engine for paper");
        let engine: SearchEnginePaper = build_search_engine(
            &CFG.search_paper,
            "paper_id",
            vec!["title", "author", "abstract"],
        )
        .await;
        info!("Search engine for paper initialized");
        engine
    });
    let file_init = SEARCH_ENGINE_FILE.get_or_init(|| async {
        info!("Initializing search engine for file");
        let engine: SearchEngineFile =
            build_search_engine(&CFG.search_file, "file_hash", vec!["content"]).await;
        info!("Search engine for file initialized");
        engine
    });
    // Drive both initializations on their own tasks, then wait for completion.
    let paper_task = tokio::spawn(paper_init);
    let file_task = tokio::spawn(file_init);
    paper_task.await.unwrap();
    file_task.await.unwrap();
}

/// Rebuild both indexes in parallel. Panics if either engine is
/// uninitialized or its rebuild fails.
pub async fn rebuild_index() {
    let paper_task = tokio::spawn(async {
        info!("Building index for paper");
        let engine = SEARCH_ENGINE_PAPER.get().unwrap();
        engine.build_index().await.unwrap();
        info!("Index for paper built");
    });
    let file_task = tokio::spawn(async {
        info!("Building index for file");
        let engine = SEARCH_ENGINE_FILE.get().unwrap();
        engine.build_index().await.unwrap();
        info!("Index for file built");
    });
    paper_task.await.unwrap();
    file_task.await.unwrap();
}
