use crate::QueryParam;
use crate::{domain::topic::Topic, utils};
use cang_jie::{CangJieTokenizer, TokenizerOption, CANG_JIE};
use jieba_rs::Jieba;
use rbatis::rbdc::DateTime;
use rbatis::{IPage, Page};
use regex::Regex;
use std::fs;
use std::path::Path;
use std::sync::Arc;
use tantivy::collector::{Count, TopDocs};
use tantivy::directory::MmapDirectory;
use tantivy::query::{QueryParser, TermQuery};
use tantivy::schema::{
    IndexRecordOption, Schema, TextFieldIndexing, TextOptions, INDEXED, STORED, TEXT,
};
use tantivy::{doc, DateOptions, Index, Term};
use tantivy::{Snippet, SnippetGenerator, TantivyError};

/// Builds the full-text schema and (re)indexes every topic in `data`.
///
/// Opens (or creates) the on-disk index under `<data_dir>/index`, registers
/// the CangJie (jieba) tokenizer, and writes one document per topic.
/// Panics on I/O or index errors — intended to run once at startup.
pub fn init_index(data: Vec<Topic>) {
    let mut builder = Schema::builder();
    // Positions are indexed so phrase queries and snippet highlighting work.
    let text_indexing = TextFieldIndexing::default()
        .set_tokenizer(CANG_JIE)
        .set_index_option(IndexRecordOption::WithFreqsAndPositions);
    // Owned (not borrowed) so the last field can consume it without a clone.
    let text_options = TextOptions::default()
        .set_indexing_options(text_indexing)
        .set_stored();

    let id = builder.add_u64_field("id", STORED | INDEXED);
    let title = builder.add_text_field("title", text_options.clone());
    let content = builder.add_text_field("content", text_options.clone());
    let tag = builder.add_text_field("tag", TEXT | STORED);
    let sum = builder.add_text_field("sum", text_options);
    let num = builder.add_u64_field("num", STORED);
    let pic = builder.add_text_field("pic", STORED);
    let hide = builder.add_u64_field("hide", INDEXED | STORED);
    let date_opts = DateOptions::from(INDEXED)
        .set_stored()
        .set_fast()
        .set_precision(tantivy::DateTimePrecision::Seconds);
    let atime = builder.add_date_field("atime", date_opts);

    let schema = builder.build();
    let index = Index::open_or_create(index_dir(), schema).unwrap();
    index.tokenizers().register(CANG_JIE, tokenizer());
    let mut writer = index.writer(50_000_000).unwrap();

    // Strips HTML tags, HTML entities, and all whitespace before indexing.
    // `\s` already covers `\r\t\n`, and `+` (not `*`) avoids pointlessly
    // matching the empty string at every position; replacement output is
    // identical to the previous `([\s\r\t\n]*)` branch.
    let regex = Regex::new(r#"(<.*?>)|(&.*?;)|\s+"#).unwrap();
    for topic in data {
        let html =
            html_escape::decode_html_entities(topic.content.clone().unwrap().as_str()).to_string();
        let content_val = regex.replace_all(&html, "").to_string();
        let date_time = &topic.atime.to_owned().unwrap();
        // NOTE(review): nanosecond timestamp narrowed to i64 — safe until ~2262.
        let atime_val =
            tantivy::DateTime::from_timestamp_nanos(date_time.unix_timestamp_nano() as i64);
        let title_val = topic.title.clone().unwrap();
        let doc = doc!(
        id=>topic.id.unwrap(),
        title=>title_val,
        content=>content_val,
        pic=>topic.pic.unwrap(),
        tag=>topic.tag.to_owned().unwrap(),
        atime=>atime_val,
        hide=>topic.hide.unwrap() as u64,
        num=>topic.num.unwrap() as u64,
        sum=>topic.sum.to_owned().unwrap());
        writer.add_document(doc).unwrap();
    }
    writer.commit().unwrap();
}

/// Returns the mmap directory backing the search index, creating
/// `<data_dir>/index` (including any missing parent) on first use.
fn index_dir() -> MmapDirectory {
    let mut data_dir = utils::data_dir();
    data_dir.push_str("/index");
    // `create_dir_all` is idempotent, so no racy exists-check is needed,
    // and unlike `create_dir` it also creates a missing parent data dir.
    fs::create_dir_all(Path::new(&data_dir)).unwrap();
    MmapDirectory::open(data_dir).unwrap()
}
/// Inserts or replaces (upserts) the index document for a single topic.
///
/// Rebuilds the same schema as `init_index` — the two must stay in sync or
/// `Index::open_or_create` fails with a schema mismatch — deletes any
/// previously indexed document with the same id, then adds the new one.
///
/// # Errors
/// Propagates any `TantivyError` from opening the index, creating the
/// writer, adding the document, or committing.
pub fn save_document(topic: &Topic) -> Result<(), TantivyError> {
    let mut builder = Schema::builder();
    let text_indexing = TextFieldIndexing::default()
        .set_tokenizer(CANG_JIE)
        .set_index_option(IndexRecordOption::WithFreqsAndPositions);
    let text_options = TextOptions::default()
        .set_indexing_options(text_indexing)
        .set_stored();

    let id = builder.add_u64_field("id", STORED | INDEXED);
    let title = builder.add_text_field("title", text_options.clone());
    let content = builder.add_text_field("content", text_options.clone());
    let tag = builder.add_text_field("tag", TEXT | STORED);
    let sum = builder.add_text_field("sum", text_options);
    let num = builder.add_u64_field("num", STORED);
    let pic = builder.add_text_field("pic", STORED);
    let hide = builder.add_u64_field("hide", INDEXED | STORED);
    let date_opts = DateOptions::from(INDEXED)
        .set_stored()
        .set_fast()
        .set_precision(tantivy::DateTimePrecision::Seconds);
    let atime = builder.add_date_field("atime", date_opts);

    let schema = builder.build();
    let index = Index::open_or_create(index_dir(), schema)?;
    index.tokenizers().register(CANG_JIE, tokenizer());
    // `?` instead of unwrap: this function already returns Result.
    let mut writer = index.writer(50_000_000)?;

    // Upsert: `delete_term` is a no-op when no document carries the term,
    // so the previous search-before-delete reader round-trip is unnecessary.
    writer.delete_term(Term::from_field_u64(id, topic.id.unwrap()));

    // Strip HTML tags, entities, and whitespace, mirroring init_index.
    let regex = Regex::new(r#"(<.*?>)|(&.*?;)|([\s\r\t\n]*)"#).unwrap();
    let html =
        html_escape::decode_html_entities(topic.content.clone().unwrap().as_str()).to_string();
    let content_val = regex.replace_all(&html, "").to_string();
    let date_time = &topic.atime.to_owned().unwrap();
    let atime_val = tantivy::DateTime::from_timestamp_nanos(date_time.unix_timestamp_nano() as i64);
    let title_val = topic.title.clone().unwrap();
    let doc = doc!(
        id=>topic.id.unwrap(),
        title=>title_val,
        content=>content_val,
        pic=>topic.pic.to_owned().unwrap(),
        tag=>topic.tag.to_owned().unwrap(),
        atime=>atime_val,
        hide=>topic.hide.unwrap() as u64,
        num=>topic.num.unwrap_or_default() as u64,
        sum=>topic.sum.to_owned().unwrap());
    writer.add_document(doc)?;
    writer.commit()?;
    Ok(())
}

/// Removes the indexed document whose `id` field equals `id_val`, if any.
pub fn delete(id_val: u64) {
    let index = Index::open(index_dir()).unwrap();
    let id_field = index.schema().get_field("id").unwrap();
    let term = Term::from_field_u64(id_field, id_val);

    // Only spin up a writer (which takes the index lock) when a matching
    // document actually exists; a failed search counts as "not found".
    let searcher = index.reader().unwrap().searcher();
    let probe = TermQuery::new(term.clone(), IndexRecordOption::Basic);
    let found = searcher
        .search(&probe, &TopDocs::with_limit(1))
        .map(|hits| !hits.is_empty())
        .unwrap_or(false);

    if found {
        let mut writer = index.writer(50_000_000).unwrap();
        writer.delete_term(term);
        writer.commit().unwrap();
    }
}

/// Full-text search over topics with pagination and snippet highlighting.
///
/// `param.kw` is parsed against the `title` and `content` fields; when the
/// caller is not logged in, `AND hide:0` is appended so hidden topics are
/// excluded. Page numbers are 1-based. Matched terms in the returned title
/// and summary are wrapped in `<font color='red'>` tags.
pub fn search(param: &QueryParam, login: bool) -> Page<Topic> {
    let index = Index::open(index_dir()).unwrap();
    index.tokenizers().register(CANG_JIE, tokenizer());
    let schema = index.schema();
    let id = schema.get_field("id").unwrap();
    let title = schema.get_field("title").unwrap();
    let content = schema.get_field("content").unwrap();
    let pic = schema.get_field("pic").unwrap();
    let sum = schema.get_field("sum").unwrap();
    let num = schema.get_field("num").unwrap();
    let tag = schema.get_field("tag").unwrap();
    let atime = schema.get_field("atime").unwrap();

    let reader = index
        .reader_builder()
        .reload_policy(tantivy::ReloadPolicy::OnCommit)
        .try_into()
        .unwrap();
    let searcher = reader.searcher();
    let mut term = param.kw.to_owned().unwrap();
    if !login {
        // Anonymous visitors must never see hidden topics.
        term.push_str(" AND hide:0");
    }
    let parser = QueryParser::for_index(&index, vec![title, content]);
    let query = parser.parse_query(&term).unwrap();

    let page_size = param.page_size.unwrap_or(10) as usize;
    // Client pages are 1-based; saturate so an out-of-contract page_no of 0
    // clamps to the first page instead of underflowing (debug panic / huge
    // offset in release with the previous `f - 1`).
    let page_no = param.page_no.map_or(0, |f| f.saturating_sub(1)) as usize;
    let (results, count) = searcher
        .search(
            &query,
            &(
                TopDocs::with_limit(page_size).and_offset(page_no * page_size),
                Count,
            ),
        )
        .unwrap();
    // Snippet generators highlight the query's matches per stored document.
    let gen_c = SnippetGenerator::create(&searcher, &*query, content).unwrap();
    let gen_t = SnippetGenerator::create(&searcher, &*query, title).unwrap();
    let mut list: Vec<Topic> = Vec::new();
    for (_score, addr) in results {
        let doc = searcher.doc(addr).unwrap();
        // Field handles are shadowed below by the extracted stored values;
        // each handle is read exactly once before its shadow appears.
        let id = doc
            .get_first(id)
            .and_then(|v| v.as_u64())
            .unwrap()
            .to_owned();
        let num = doc
            .get_first(num)
            .and_then(|v| v.as_u64())
            .unwrap()
            .to_owned();
        let pic = doc
            .get_first(pic)
            .and_then(|v| v.as_text())
            .unwrap()
            .to_owned();
        let mut title = doc
            .get_first(title)
            .and_then(|v| v.as_text())
            .unwrap()
            .to_owned();
        let content = doc
            .get_first(content)
            .and_then(|v| v.as_text())
            .unwrap()
            .to_owned();
        let mut sum = doc
            .get_first(sum)
            .and_then(|v| v.as_text())
            .unwrap()
            .to_owned();
        let tag = doc
            .get_first(tag)
            .and_then(|v| v.as_text())
            .unwrap()
            .to_owned();
        let atime = doc
            .get_first(atime)
            .and_then(|v| v.as_date())
            .unwrap()
            .to_owned();
        let mut topic = Topic::default();

        // Prefer the highlighted snippet when it is non-empty; otherwise
        // keep the plain stored title/summary.
        let cont_snip = gen_c.snippet_from_doc(&doc);
        let title_snip = gen_t.snippet_from_doc(&doc);
        let ts = highlight(&title_snip);
        let cs = highlight(&cont_snip);
        if !ts.is_empty() {
            title = ts;
        }
        if !cs.is_empty() {
            sum = cs;
        }
        topic.id = Some(id);
        topic.title = Some(title);
        topic.content = Some(content);
        topic.sum = Some(sum);
        topic.num = Some(num as u32);
        topic.tag = Some(tag);
        topic.pic = Some(pic);
        topic.atime = Some(DateTime::from_timestamp_nano(
            atime.into_timestamp_nanos() as i128
        ));
        list.push(topic);
    }
    Page::new((page_no + 1) as u64, page_size as u64)
        .set_total(count as u64)
        .set_records(list)
}
/// CangJie tokenizer shared by indexing and querying (search-mode cut, no HMM).
// NOTE(review): `Jieba::empty()` starts with an empty dictionary, so the cut
// presumably degrades toward per-character segmentation rather than dictionary
// words. If that is an intentional size/startup trade-off, fine — otherwise
// confirm whether `Jieba::new()` (bundled default dict) was intended.
fn tokenizer() -> CangJieTokenizer {
    CangJieTokenizer {
        worker: Arc::new(Jieba::empty()),
        option: TokenizerOption::ForSearch { hmm: false },
    }
}
/// Renders a snippet as HTML, wrapping each highlighted range in
/// `<font color='red'>…</font>` and copying the surrounding text verbatim.
/// Returns an empty string when the snippet has no fragment.
fn highlight(snippet: &Snippet) -> String {
    let fragment = snippet.fragment();
    let mut out = String::with_capacity(fragment.len());
    let mut cursor = 0;

    for range in snippet.highlighted() {
        // Plain text up to the match, then the match itself wrapped in tags.
        out.push_str(&fragment[cursor..range.start]);
        out.push_str("<font color='red'>");
        out.push_str(&fragment[range.clone()]);
        out.push_str("</font>");
        cursor = range.end;
    }

    // Trailing text after the last highlighted range.
    out.push_str(&fragment[cursor..]);
    out
}
