package me.itblog.services;

import me.itblog.bean.Article;
import me.itblog.bean.LuceneSearchResult;
import me.itblog.utils.Constant;
import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.*;
import org.apache.lucene.search.vectorhighlight.BaseFragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldQuery;
import org.apache.lucene.search.vectorhighlight.FragListBuilder;
import org.apache.lucene.search.vectorhighlight.FragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder;
import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.nutz.dao.Dao;
import org.nutz.dao.Sqls;
import org.nutz.dao.sql.Sql;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.lang.Files;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

/**
 * Full-text search service for {@link Article} backed by a Lucene (4.10) index.
 *
 * <p>Lifecycle is managed by the IoC container: {@code init} opens the shared
 * {@link IndexWriter}, {@code close} flushes and releases it. Two search
 * implementations are provided: {@link #search} (classic {@code Highlighter})
 * and {@link #search1} ({@code FastVectorHighlighter}, which relies on the
 * term vectors stored by {@link #addArticle}).
 *
 * <p>Created by Administrator on 2015/12/22.
 */
@IocBean(create = "init", depose = "close")
public class ArticleLuceneService {

    private static final Logger logger = Logger.getLogger(ArticleLuceneService.class);

    /** Maximum length (chars) of the highlighted content fragment in {@link #search1}. */
    private static final int CONTENT_FRAGMENT_SIZE = 500;

    /** Maximum length (chars) of the highlighted title fragment in {@link #search1}. */
    private static final int TITLE_FRAGMENT_SIZE = 300;

    @Inject
    protected Dao dao;

    /** Filesystem directory holding the index, injected from conf key "lucene.dir". */
    @Inject("java:$conf.get('lucene.dir')")
    private String indexDir;

    /**
     * Shared index writer; opened by {@link #init()}, released by {@link #close()}.
     * NOTE(review): kept public for backward compatibility, but external mutation
     * is discouraged — prefer going through this service's methods.
     */
    public IndexWriter writer = null;

    /**
     * Opens (creating if missing) the index directory and builds the shared
     * {@link IndexWriter} with an IK analyzer in CREATE_OR_APPEND mode.
     *
     * @throws IOException if the index directory cannot be opened or locked
     */
    public void init() throws IOException {
        Files.createDirIfNoExists(indexDir);
        Directory dir = FSDirectory.open(new File(indexDir)); // on-disk index location
        Analyzer analyzer = new IKAnalyzer();                 // Chinese word segmentation
        IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_4_10_0, analyzer);
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND); // reuse existing index
        writer = new IndexWriter(dir, iwc);
    }

    /**
     * Searches the "title" and "content" fields using the classic Highlighter.
     *
     * @param keyword     user query, parsed by {@link MultiFieldQueryParser}
     * @param isHighlight when true, matched terms are wrapped in a yellow-background span
     * @param searchSize  maximum number of hits to return
     * @return one {@link LuceneSearchResult} per hit; never null
     * @throws IOException                  on index read failure
     * @throws InvalidTokenOffsetsException if highlighting hits invalid token offsets
     * @throws ParseException               if the keyword cannot be parsed as a query
     */
    public List<LuceneSearchResult> search(String keyword, boolean isHighlight, int searchSize) throws IOException, InvalidTokenOffsetsException, ParseException {
        // Near-real-time reader over the shared writer; we own it and must close it.
        IndexReader reader = DirectoryReader.open(writer, false);
        try {
            Analyzer ikAnalyzer = new IKAnalyzer();
            IndexSearcher searcher = new IndexSearcher(reader);
            MultiFieldQueryParser parser = new MultiFieldQueryParser(new String[]{"title", "content"}, ikAnalyzer);
            Query query = parser.parse(keyword);
            TopDocs results = searcher.search(query, searchSize);

            SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span style=\"background:yellow\">", "</span>");
            QueryScorer scorer = new QueryScorer(query);
            Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);
            Highlighter highlight = new Highlighter(formatter, new SimpleHTMLEncoder(), scorer);
            highlight.setTextFragmenter(fragmenter);

            List<LuceneSearchResult> searchResults = new ArrayList<LuceneSearchResult>();
            for (ScoreDoc sd : results.scoreDocs) {
                Document doc = searcher.doc(sd.doc);
                String id = doc.get("id"), highContent, highTitle;
                if (isHighlight) {
                    // getBestFragment returns null when the field contains no match.
                    highTitle = highlight.getBestFragment(ikAnalyzer, "title", doc.get("title"));
                    if (highTitle == null) {
                        highTitle = doc.get("title");
                    }
                    highContent = highlight.getBestFragment(ikAnalyzer, "content", doc.get("content"));
                    if (highContent == null) {
                        // BUGFIX: was an empty block, leaving a null content in the
                        // result; fall back to the stored content like the title does.
                        highContent = doc.get("content");
                    }
                } else {
                    highTitle = doc.get("title");
                    highContent = doc.get("content");
                }
                searchResults.add(new LuceneSearchResult(id, highTitle, highContent));
            }
            return searchResults;
        } finally {
            reader.close();
        }
    }

    /**
     * Searches the "title" and "content" fields using the FastVectorHighlighter,
     * which requires the term vectors stored by {@link #addArticle}.
     *
     * @param keyword    user query, parsed by {@link MultiFieldQueryParser}
     * @param highlight  when true, matched terms are wrapped in colored tags
     * @param searchSize maximum number of hits to return
     * @return one {@link LuceneSearchResult} per hit; never null
     * @throws IOException    on index read failure
     * @throws ParseException if the keyword cannot be parsed as a query
     */
    public List<LuceneSearchResult> search1(String keyword, boolean highlight, int searchSize) throws IOException, ParseException {
        IndexReader reader = DirectoryReader.open(writer, false);
        try {
            IndexSearcher searcher = new IndexSearcher(reader);
            Analyzer analyzer = new IKAnalyzer();
            MultiFieldQueryParser parser = new MultiFieldQueryParser(new String[]{"title", "content"}, analyzer);
            Query query = parser.parse(keyword);
            TopDocs results = searcher.search(query, searchSize);

            FragListBuilder fragListBuilder = new SimpleFragListBuilder();
            FragmentsBuilder fragmentsBuilder = new ScoreOrderFragmentsBuilder(BaseFragmentsBuilder.COLORED_PRE_TAGS, BaseFragmentsBuilder.COLORED_POST_TAGS);
            // Loop-invariant: build the highlighter and field query once, not per hit.
            FastVectorHighlighter fvh = new FastVectorHighlighter(true, true, fragListBuilder, fragmentsBuilder);
            FieldQuery fq = fvh.getFieldQuery(query);

            List<LuceneSearchResult> searchResults = new ArrayList<LuceneSearchResult>();
            for (ScoreDoc sd : results.scoreDocs) {
                Document doc = searcher.doc(sd.doc);
                String id = doc.get("id"), highContent, highTitle;
                if (highlight) {
                    // getBestFragment returns null when the field contains no match.
                    highContent = fvh.getBestFragment(fq, reader, sd.doc, "content", CONTENT_FRAGMENT_SIZE);
                    highTitle = fvh.getBestFragment(fq, reader, sd.doc, "title", TITLE_FRAGMENT_SIZE);
                    if (highTitle == null) {
                        // No highlight in the title: fall back to the stored title.
                        highTitle = doc.get("title");
                    }
                    if (highContent == null) {
                        String content = doc.get("content");
                        // BUGFIX: substring(500) returned everything AFTER the first
                        // 500 chars; the intent is a preview of the FIRST 500 chars.
                        highContent = content.length() > CONTENT_FRAGMENT_SIZE
                                ? content.substring(0, CONTENT_FRAGMENT_SIZE) : content;
                    }
                } else {
                    highTitle = doc.get("title");
                    highContent = doc.get("content");
                }
                searchResults.add(new LuceneSearchResult(id, highTitle, highContent));
            }
            return searchResults;
        } finally {
            reader.close();
        }
    }

    /**
     * Adds an {@link Article} to the index (id, title, content fields) and commits.
     * A null article or an indexing failure is logged and otherwise ignored.
     *
     * @param article the article to index; may be null (no-op)
     */
    public void addArticle(Article article) {
        if (article == null) return; // unlikely, but guard anyway
        Document document = new Document();
        document.add(newIndexedField("id", article.getId()));
        document.add(newIndexedField("title", article.getTitle()));
        // Content may legitimately be null; index an empty string instead.
        String contents = article.getContents();
        document.add(newIndexedField("content", contents != null ? contents : ""));
        try {
            writer.addDocument(document);
            writer.commit();
        } catch (IOException e) {
            // Log with the cause instead of swallowing the stack trace.
            logger.error("add to index fail : id=" + article.getId(), e);
        }
    }

    /**
     * Builds a stored, tokenized field with term vectors (positions + offsets),
     * as required by the FastVectorHighlighter used in {@link #search1}.
     *
     * @param name  field name
     * @param value field value (must not be null)
     * @return a fully configured indexable field
     */
    private static Field newIndexedField(String name, String value) {
        FieldType fieldType = new FieldType();
        fieldType.setIndexed(true);                 // searchable
        fieldType.setStored(true);                  // retrievable via doc.get()
        fieldType.setTokenized(true);               // analyzed into terms
        fieldType.setStoreTermVectors(true);        // needed by FastVectorHighlighter
        fieldType.setStoreTermVectorPositions(true);
        fieldType.setStoreTermVectorOffsets(true);
        return new Field(name, value, fieldType);
    }

    /**
     * Rebuilds the whole index: wipes it, then re-indexes every article id
     * found in {@code t_article}.
     *
     * @throws IOException on index write failure
     */
    public void rebuild() throws IOException {
        Sql sql = Sqls.queryString("select id from t_article");
        dao.execute(sql);
        writer.deleteAll();
        String[] articleIds = sql.getObject(String[].class);
        for (String articleId : articleIds) {
            addArticle(dao.fetch(Article.class, articleId));
        }
        writer.commit();
    }

    /**
     * Flushes pending changes and releases the index writer. Safe to call when
     * the writer was never opened or is already closed (idempotent).
     *
     * @throws IOException on commit/close failure
     */
    public void close() throws IOException {
        IndexWriter writer = this.writer;
        if (writer != null) {
            writer.commit();
            writer.close();
            this.writer = null;
        }
    }
}
