use std::collections::HashSet;

use tantivy::{collector::{FilterCollector, TopDocs}, doc, query::QueryParser, schema::Value, TantivyDocument, Term};

use crate::{app_error::AppError, app_writer::AppResult, db::db_get, dtos::{files::FileResponse, paper::PaperResponse, search::SearchRequest}, entities::{files::File, papers::Paper}, search_engine::{SearchEngine, SearchEngineFile, SearchEnginePaper}, utils::{file_utils::{extract_string, get_file_path}, rand_utils::calculate_hash}};

use super::{file::get_file_info_by_userid, paper::paper};


impl SearchEnginePaper {
    /// Index a paper so it becomes searchable by title, author and abstract.
    ///
    /// Besides the text fields, the paper id is stored as a `u64` hash in the
    /// `_ids` fast field so that `search` can cheaply filter hits by ownership.
    ///
    /// NOTE(review): visibility of the document depends on a later commit by
    /// whoever owns the index writer — this method only stages the add.
    pub async fn add_paper(&self, paper: Paper) -> AppResult<()> {
        let fields = self.get_fields();
        let ids_field = self.get_ids_field();
        // Hash of the paper id; must match the hashing used in `search`.
        let ids = calculate_hash(paper.id.as_bytes());
        let doc = doc!(
            fields.paper_id => paper.id,
            ids_field => ids,
            fields.title => paper.title,
            fields.author => paper.author,
            fields.abstract_ => paper.abstract_
        );
        let index_writer = self.get_index_writer();
        let index_writer = index_writer.read().await;
        let _ = index_writer.add_document(doc)?;
        Ok(())
    }

    /// Stage deletion of every indexed document whose `paper_id` field equals
    /// `paper_id`. Infallible from the caller's perspective: `delete_term`
    /// only records the delete; it is applied on the next commit.
    pub async fn delete_paper(&self, paper_id: &str) {
        let fields = self.get_fields();
        let term = Term::from_field_text(fields.paper_id, paper_id);

        let index_writer = self.get_index_writer();
        let index_writer = index_writer.read().await;
        index_writer.delete_term(term);
    }

    /// Re-index a paper: delete any existing document for its id, then add
    /// the new version. Tantivy has no in-place update, so this is the
    /// canonical delete-then-add sequence.
    pub async fn update_paper(&self, paper: Paper) -> AppResult<()> {
        self.delete_paper(paper.id.as_str()).await;
        self.add_paper(paper).await?;
        Ok(())
    }

    /// Full-text search over the papers owned by `user_id`.
    ///
    /// Ownership is enforced by collecting the hashed ids of the user's
    /// papers from `users_papers` and filtering hits through the `_ids`
    /// fast field. At most `req.top` results are returned, ranked by score.
    ///
    /// A failure while resolving a single hit (missing field, DB lookup
    /// error, …) is logged and that hit is skipped; it does not abort the
    /// whole search.
    pub async fn search(
        &self,
        req: SearchRequest,
        user_id: &str,
    ) -> AppResult<Vec<PaperResponse>> {
        let db = db_get()?;
        // The set holds values that are already uniform u64 hashes, so the
        // identity (no-hash) hasher is safe and fast here.
        let papers_id = sqlx::query!(
            r#"
            SELECT paper_id FROM users_papers
            WHERE user_id = $1
            "#,
            user_id
        )
        .fetch_all(db)
        .await?
        .into_iter()
        .map(|rec| calculate_hash(rec.paper_id.as_bytes()))
        .collect::<HashSet<u64, nohash_hasher::BuildNoHashHasher<u64>>>();
        // Pre-size the result buffer instead of Vec::new() + reserve().
        let mut res = Vec::with_capacity(req.top);

        let fields = self.get_fields();
        let index_reader = self.get_index_reader();
        let searcher = index_reader.searcher();
        let query_parser = QueryParser::for_index(
            self.get_index(),
            vec![fields.title, fields.author, fields.abstract_],
        );
        let query = query_parser.parse_query(&req.query)?;
        let top = TopDocs::with_limit(req.top);
        let filter_collector = FilterCollector::new(
            "_ids".to_string(),
            move |value: u64| papers_id.contains(&value),
            top,
        );

        let top_docs = searcher.search(&query, &filter_collector)?;
        for (_, doc_address) in top_docs {
            // BUGFIX: resolve each hit inside an async block. In the old
            // code the `?`s lived directly in a `match { … }` scrutinee
            // block, so they returned from `search` itself and the
            // log-and-continue `Err` arm was unreachable. With the async
            // block, errors are captured and handled per hit.
            let hit = async {
                let doc = searcher.doc::<TantivyDocument>(doc_address)?;
                let paper_id = doc
                    .get_first(fields.paper_id)
                    .ok_or_else(|| anyhow::anyhow!("Missing paper_id field"))?;
                let paper_id = paper_id
                    .as_str()
                    .ok_or_else(|| anyhow::anyhow!("Invalid paper_id field"))?;
                Ok::<PaperResponse, AppError>(paper(paper_id).await?)
            }
            .await;
            match hit {
                Ok(paper) => res.push(paper),
                Err(e) => {
                    tracing::error!("Error while searching paper: {e}");
                }
            }
        }
        Ok(res)
    }
}

impl SearchEngineFile {
    pub async fn add_file(&self, file: File) -> AppResult<()> {
        let path = get_file_path(&file.file_hash);
        let data = tokio::fs::read(path).await?;
        let text = extract_string(data.as_slice(), file.file_type.as_str())?;

        let fields = self.get_fields();
        let ids_field = self.get_ids_field();
        let ids = calculate_hash(file.file_hash.as_bytes());
        let doc = doc!(
            fields.file_hash => file.file_hash,
            ids_field => ids,
            fields.content => text,
        );

        let index_writer = self.get_index_writer();
        let index_writer = index_writer.read().await;
        let _ = index_writer.add_document(doc)?;
        
        Ok(())
    }

    pub async fn delete_file(&self, file_hash: &str) {
        let fields = self.get_fields();
        let term = Term::from_field_text(fields.file_hash, file_hash);


        let index_writer = self.get_index_writer();
        let index_writer = index_writer.read().await;
        let _ = index_writer.delete_term(term);
    }

    pub async fn search(
        &self,
        req: SearchRequest,
        user_id: &str,
    ) -> AppResult<Vec<FileResponse>> {
        let db = db_get()?;
        let files_hash = sqlx::query!(
            r#"
                SELECT file_hash FROM papers_files
                INNER JOIN users_papers 
                ON papers_files.paper_id = users_papers.paper_id
                WHERE users_papers.user_id = $1
            "#,
            user_id
        )
        .fetch_all(db)
        .await?
        .into_iter()
        .map(|row| calculate_hash(row.file_hash.as_bytes()))
        .collect::<HashSet<u64, nohash_hasher::BuildNoHashHasher<u64>>>();

        let mut res = Vec::new();
        res.reserve(req.top);

        let fields = self.get_fields();
        let index_reader = self.get_index_reader();
        let searcher = index_reader.searcher();
        let query_parser = QueryParser::for_index(
            self.get_index(),
            vec![fields.content],
        );
        let query = query_parser.parse_query(&req.query)?;
        let top = TopDocs::with_limit(req.top);
        let filter_collector = FilterCollector::new(
            "_ids".to_string(),
            move |value: u64| {
                files_hash.contains(&value)
            },
            top,
        );
        let top_docs = searcher.search(&query, &filter_collector)?;
        for (_, doc_address) in top_docs {
            match {
                let doc = searcher.doc::<TantivyDocument>(doc_address)?;
                let file_hash = doc
                    .get_first(fields.file_hash)
                    .ok_or(anyhow::anyhow!("Missing paper_id field"))?;
                let file_hash = file_hash
                    .as_str()
                    .ok_or(anyhow::anyhow!("Invalid paper_id field"))?;
                let paper = get_file_info_by_userid(user_id, file_hash).await?;
                Ok::<FileResponse, AppError>(paper)
            } {
                Ok(paper) => res.push(paper),
                Err(e) => {
                    tracing::error!("Error while searching paper: {e}");
                    continue;
                }
            }
        }
        Ok(res)
    }
}


#[cfg(test)]
mod tests {
    use std::time::Duration;

    use tantivy::collector::FilterCollector;

    use super::*;
    use crate::{db::init_db_conn, search_engine::{self, get_search_engine_paper, init_search_engine, rebuild_index}, utils::rand_utils::calculate_hash};
    // Integration-style smoke test: requires a reachable database and a
    // rebuildable search index, plus the hard-coded user/paper UUIDs below
    // to exist in that database. It only prints results — there are no
    // assertions, so it documents the call flow rather than verifying it.
    #[tokio::test]
    async fn test_search() {
        // Setup errors are deliberately ignored so repeated runs (already
        // initialized DB/engine) do not fail the test.
        let _ = init_db_conn().await;
        let _ = init_search_engine().await;
        let _ = rebuild_index().await;

        let search_engine = get_search_engine_paper().await.unwrap();
        let req = SearchRequest {
            query: "Zhang".to_string(),
            top: 10
        };
        // NOTE(review): these ids are environment-specific fixtures — the
        // test is meaningless against a database that lacks them.
        let user_id = "cdd0e080-5bb1-4442-b6f7-2ba60dbd0555";
        // `paper_id` is only used by the commented-out manual query below;
        // it is currently an unused-variable warning.
        let paper_id = "19eac4d8-53a3-4625-b1e7-154ddb365657";
        // Give the freshly rebuilt index time to commit and become visible
        // to readers before querying it.
        tokio::time::sleep(Duration::from_secs(5)).await;
        let res = search_engine.search(req, user_id).await.unwrap();
        println!("{:?}", res);
        // let fields = search_engine.get_fields();
        // let index_reader = search_engine.get_index_reader();
        // let searcher = index_reader.searcher();
        // let query_parser = QueryParser::for_index(
        //     search_engine.get_index(),
        //     vec![fields.title, fields.author, fields.abstract_],
        // );
        // let query = query_parser.parse_query(&req.query).unwrap();
        // let top = TopDocs::with_limit(req.top);
        // let filter_collector = FilterCollector::new("_ids".to_string(), |value: u64| {
        //     value == calculate_hash(paper_id.as_bytes())
        // }, top);
        
        // let top_docs = searcher.search(&query, &filter_collector).unwrap();
        // for (_ , doc) in top_docs {
        //     let doc = searcher.doc::<TantivyDocument>(doc).unwrap();
        //     let paper_id = doc.get_first(fields.paper_id).unwrap()
        //     .as_str().unwrap();
        //     println!("paper_id: {}", paper_id);
        // }

    }
}