package com.cp.service.impl;

import com.cp.repository.BNBZEntity;
import com.cp.repository.Config;
import com.cp.repository.PageEntity;
import com.cp.service.LuceneQuery;
import com.cp.service.LuceneService;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;



@Service
public class LuceneServiceImpl implements LuceneService {

	private static final Logger log = LogManager.getLogger();

	/** Number of result rows shown per page. */
	private static final int PAGE_SIZE = 20;

	@Autowired
	Config config;

	/**
	 * Shared IK analyzer in smart-segmentation mode. Lucene {@link Analyzer}
	 * instances are thread-safe for {@code tokenStream()} use, so one shared
	 * instance serves all requests.
	 */
	public final static Analyzer analyzer = new IKAnalyzer(true);

	/**
	 * Runs a full-text search against the on-disk Lucene index and returns one
	 * page of highlighted results.
	 *
	 * @param keyWord    raw user keyword; Lucene query-syntax characters are
	 *                   escaped before querying so input cannot inject operators
	 * @param pageNumber 1-based number of the result page to return
	 * @return a map with keys {@code "pageEntity"} (paging info),
	 *         {@code "entity"} (list of highlighted {@link BNBZEntity} hits) and
	 *         {@code "usedTime"} (milliseconds spent building the page), or
	 *         {@code null} when the keyword is empty, nothing matched, or an
	 *         error occurred
	 */
	@Override
	public Map<String, Object> search(String keyWord, Integer pageNumber) {
		IndexReader ir = null;
		try {
			Directory dir = FSDirectory.open(new File(config.getIndexPath()));
			ir = DirectoryReader.open(dir);
			IndexSearcher searcher = new IndexSearcher(ir);
			// Escape Lucene special characters in user input.
			keyWord = QueryParser.escape(keyWord);
			if (!"".equals(keyWord)) {
				//Query strQuery = getQueryParser(keyWord, Version.LUCENE_46, "content", new IKAnalyzer(true));
				Query strQuery = LuceneQuery.multiFieldQuery(keyWord, "3");
				TopDocs topDocs = searcher.search(strQuery, Integer.MAX_VALUE);
				log.debug("total: {}", topDocs.totalHits);
				if (topDocs.totalHits > 0) {
					return printResult(topDocs, searcher, keyWord, strQuery, pageNumber);
				}
			}
			return null;
		} catch (Exception ex) {
			// Pass the throwable so the full stack trace is logged,
			// not just a (possibly null) message.
			log.error("search failed for keyword [{}]", keyWord, ex);
		} finally {
			// BUG FIX: ir stays null when opening the index fails; closing
			// unconditionally threw an NPE that masked the original error.
			if (ir != null) {
				try {
					ir.close();
				} catch (IOException e) {
					log.error("failed to close index reader", e);
				}
			}
		}
		return null;
	}

	/**
	 * 模糊搜索 — builds a phrase query for a single field.
	 * Currently unused (see commented-out call in {@link #search}); kept for
	 * reference.
	 *
	 * @param keyWord search keyword
	 * @param v       Lucene version constant for the parser
	 * @param field   index field to search
	 * @param a       analyzer used to tokenize the keyword
	 * @return the parsed phrase query, or {@code null} on failure
	 */
	private Query getQueryParser(String keyWord, Version v, String field, Analyzer a) {
		QueryParser parser = new QueryParser(v, field, a);
		Query query = null;
		try {
			query = parser.createPhraseQuery(field, keyWord);
		} catch (Exception e) {
			log.error("failed to build phrase query for [{}]", keyWord, e);
		}
		return query;
	}

	/**
	 * 高亮显示 — builds one page of results, applying HTML highlighting to the
	 * {@code content} field of each hit.
	 *
	 * @param results     all matching documents (searched with MAX_VALUE limit,
	 *                    so {@code scoreDocs} covers every hit)
	 * @param searcher    searcher used to load the stored documents
	 * @param keyWord     escaped search keyword (used only for logging here)
	 * @param query       the executed query; drives highlight scoring
	 * @param currentPage 1-based page to extract
	 * @return map with "pageEntity", "entity" and "usedTime" entries
	 */
	private Map<String, Object> printResult(TopDocs results, IndexSearcher searcher, String keyWord, Query query, int currentPage) {
		long start = System.currentTimeMillis();
		ScoreDoc[] hit = results.scoreDocs;
		int rowCount = results.totalHits;
		log.debug("共找到{}条，---{}", rowCount, hit.length);
		Map<String, Object> map = new HashMap<String, Object>();
		List<BNBZEntity> bList = new ArrayList<BNBZEntity>();
		PageEntity pageEntity = new PageEntity();
		pageEntity.setCurrentPage(currentPage);
		pageEntity.setPageRowCount(PAGE_SIZE);
		pageEntity.setRowCount(rowCount);
		if (rowCount == 0) {
			pageEntity.setPageCount(0);
			log.debug("对不起，没有找到您要的结果。");
		} else {
			pageEntity.setPageCount(pageEntity.getPageCount(PAGE_SIZE, rowCount));
			int startIndex = (currentPage - 1) * PAGE_SIZE;
			// Clamp to the score docs actually returned as well as rowCount.
			int endIndex = Math.min(hit.length, Math.min(rowCount, currentPage * PAGE_SIZE));
			// Formatter/highlighter are loop-invariant; build them once.
			SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<strong><font color='red'>", "</font></strong>");
			Highlighter highlighter = new Highlighter(formatter, new QueryScorer(query));
			for (int i = startIndex; i < endIndex; i++) {
				try {
					int docId = hit[i].doc;
					Document doc = searcher.doc(docId);
					String title = doc.get("title");
					String content = doc.get("content");
					// BUG FIX: the first argument of tokenStream() is the FIELD
					// name, not the search keyword.
					TokenStream tokenStream = analyzer.tokenStream("content", new StringReader(content));
					try {
						String fragment = highlighter.getBestFragment(tokenStream, content);
						// getBestFragment returns null when no fragment matches;
						// keep the raw content rather than storing null.
						if (fragment != null) {
							content = fragment;
						}
					} catch (InvalidTokenOffsetsException e) {
						log.error("highlighting failed for doc {}", docId, e);
					} finally {
						// Analyzer.tokenStream contract requires closing the stream.
						tokenStream.close();
					}
					BNBZEntity bEntity = new BNBZEntity();
					bEntity.setId(doc.get("id"));
					bEntity.setTitle(title);
					bEntity.setContent(content);
					bEntity.setScore(hit[i].score);
					bList.add(bEntity);
				} catch (Exception e) {
					log.error("failed to read hit {} for keyword [{}]", i, keyWord, e);
				}
			}
		}
		map.put("pageEntity", pageEntity);
		map.put("entity", bList);
		map.put("usedTime", (System.currentTimeMillis() - start));
		return map;
	}
}
