package com.hefei.blog.core.lucene.article;

import java.io.IOException;
import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

import com.hefei.blog.base.lang.EntityStatus;
import com.hefei.blog.base.lang.EnumPrivacy;
import com.hefei.blog.base.paging.Paging;
import com.hefei.blog.core.data.Post;
import com.hefei.blog.core.lucene.AnalyzerUtil;
import com.hefei.blog.core.lucene.LuceneConstants;
import com.hefei.frontend.framework.http.request.RequestThreadLocal;

public class ArticleIndex {

	/** Class-wide logger. */
	static Logger logger = Logger.getLogger(ArticleIndex.class);

	/**
	 * Root path of the Lucene index storage (INDEX_ROOT). Used only for the
	 * readability pre-check before writes; the article index itself is opened
	 * from LuceneConstants.ARTICLE_INDEX. (Original comment claimed this
	 * tracked "whether the index was created", which it does not.)
	 */
	static Path docDir = Paths.get(LuceneConstants.INDEX_ROOT);


	/**
	 * Adds a single post to the article index.
	 *
	 * <p>Opens the index in CREATE_OR_APPEND mode, writes one document built by
	 * {@link ArticleDocument#genDocument}, and closes the writer and directory.
	 * Errors are logged, never propagated.
	 *
	 * @param post the post to index
	 */
	public static void addIndex(Post post) {
		if (!Files.isReadable(docDir)) {
			// NOTE(review): checks INDEX_ROOT readability but writes to ARTICLE_INDEX — confirm intent.
			logger.error(" Path does not exist or is not readable " + docDir.toAbsolutePath());
			// Do NOT System.exit(1) here: killing the JVM from a library method
			// would take down the whole application over a single failed write.
			return;
		}
		logger.info("addIndex to directory  " + docDir.toAbsolutePath());
		long beginTimer = System.currentTimeMillis();
		// try-with-resources closes both the writer and the underlying
		// directory (the original leaked the Directory).
		try (Directory dir = FSDirectory.open(Paths.get(LuceneConstants.ARTICLE_INDEX));
				IndexWriter writer = new IndexWriter(dir,
						new IndexWriterConfig(AnalyzerUtil.getAnalyzer()).setOpenMode(OpenMode.CREATE_OR_APPEND))) {
			writer.addDocument(ArticleDocument.genDocument(post));
			logger.info("addIndex cost " + (System.currentTimeMillis() - beginTimer));
		} catch (Exception e) {
			logger.error(" Error ", e);
		}
	}

	/**
	 * Re-indexes an existing post: replaces the document whose {@code id} term
	 * matches {@code post.getId()} with a freshly generated document.
	 *
	 * <p>Opens the index in APPEND mode; errors are logged, never propagated.
	 *
	 * @param post the post whose index entry should be refreshed
	 */
	public static void updateIndex(Post post) {
		if (!Files.isReadable(docDir)) {
			// NOTE(review): checks INDEX_ROOT readability but writes to ARTICLE_INDEX — confirm intent.
			logger.error(" Path does not exist or is not readable " + docDir.toAbsolutePath());
			// Do NOT System.exit(1): a library method must not kill the JVM.
			return;
		}
		logger.info("updateIndex to directory  " + docDir.toAbsolutePath());
		long beginTimer = System.currentTimeMillis();
		// try-with-resources closes both the writer and the underlying
		// directory (the original leaked the Directory).
		try (Directory dir = FSDirectory.open(Paths.get(LuceneConstants.ARTICLE_INDEX));
				IndexWriter writer = new IndexWriter(dir,
						new IndexWriterConfig(AnalyzerUtil.getAnalyzer()).setOpenMode(OpenMode.APPEND))) {
			writer.updateDocument(new Term("id", String.valueOf(post.getId())), ArticleDocument.genDocument(post));
			logger.info("updateIndex cost " + (System.currentTimeMillis() - beginTimer));
		} catch (Exception e) {
			logger.error(" Error ", e);
		}
	}
	
	
	/**
	 * Returns the last {@link ScoreDoc} of the previous page, used as the
	 * anchor for {@link IndexSearcher#searchAfter} pagination.
	 *
	 * @param query    the query being paginated
	 * @param searcher searcher over the article index
	 * @param sort     sort order (must match the main search call)
	 * @param pageIndex 1-based page number
	 * @param pageSize  results per page
	 * @return the anchor doc, or {@code null} for the first page or when there
	 *         are no results at all
	 * @throws Exception on search failure
	 */
	private static ScoreDoc getLastScoreDoc(Query query, IndexSearcher searcher, Sort sort, int pageIndex, int pageSize) throws Exception {
		if (pageIndex <= 1) {
			return null; // first page (or invalid index): no anchor needed
		}
		int num = pageSize * (pageIndex - 1); // docs consumed by earlier pages
		TopDocs tds = searcher.search(query, num, sort);
		if (tds.scoreDocs.length == 0) {
			return null; // nothing matched at all
		}
		// BUG FIX: the original did scoreDocs[num - 1] unconditionally and threw
		// ArrayIndexOutOfBoundsException whenever the requested page lay past
		// the end of the result set; clamp to the last available hit instead.
		int last = Math.min(num, tds.scoreDocs.length) - 1;
		return tds.scoreDocs[last];
	}

	/**
	 * Full-text search over public, normal-status articles.
	 *
	 * <p>Matches {@code searchKey} against the content, title, summary and tags
	 * fields (any one may hit), restricted to privacy=OPEN and
	 * status=STATUS_NORMAL, sorted by createTime then id (both descending), and
	 * paginated via {@code searchAfter}. Results are written into
	 * {@code paging} (total count + parsed {@link Post} list). Errors are
	 * logged, never propagated.
	 *
	 * @param paging    paging state; receives total hits and result list
	 * @param searchKey user-supplied search phrase
	 */
	public static void searchArticle(Paging paging, String searchKey) {
		long beginTimer = System.currentTimeMillis();
		// try-with-resources closes the reader; the original also declared a
		// Directory local that was never assigned (dead code) — removed.
		try (IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(LuceneConstants.ARTICLE_INDEX)))) {
			IndexSearcher searcher = new IndexSearcher(reader);

			// Key may match any of the searchable text fields.
			Query keyQuery = MultiFieldQueryParser.parse(searchKey,
					new String[]{"content", "title", "summary", "tags"},
					new BooleanClause.Occur[]{Occur.SHOULD, Occur.SHOULD, Occur.SHOULD, Occur.SHOULD},
					AnalyzerUtil.getAnalyzer());

			// Restrict to publicly visible, normal-status posts.
			BooleanQuery.Builder builder = new BooleanQuery.Builder();
			builder.add(keyQuery, Occur.MUST);
			builder.add(new TermQuery(new Term("privacy", String.valueOf(EnumPrivacy.OPEN.getIndex()))), Occur.MUST);
			builder.add(new TermQuery(new Term("status", String.valueOf(EntityStatus.STATUS_NORMAL))), Occur.MUST);
			Query query = builder.build();

			// Sort by createTime desc, then id desc.
			// NOTE(review): sorted as STRING here but as LONG in
			// searchArticleByTag — confirm how the fields are indexed.
			Sort sort = new Sort(
					new SortField("createTime", SortField.Type.STRING, true),
					new SortField("id", SortField.Type.STRING, true));

			// Anchor on the last doc of the previous page, then fetch this page.
			ScoreDoc lastSd = getLastScoreDoc(query, searcher, sort, paging.getPageNo(), paging.getMaxResults());
			TopDocs topDocs = searcher.searchAfter(lastSd, query, paging.getMaxResults(), sort);
			paging.setTotalCount(topDocs.totalHits);

			List<Post> posts = new ArrayList<Post>();
			for (ScoreDoc sd : topDocs.scoreDocs) {
				posts.add(ArticleDocument.parseDocument(searcher.doc(sd.doc)));
			}
			paging.setResults(posts);
		} catch (Exception e) {
			logger.error(RequestThreadLocal.getTimer() + " Error", e);
		} finally {
			// Total time spent searching.
			logger.info(RequestThreadLocal.getTimer() + " cost " + (System.currentTimeMillis() - beginTimer));
		}
	}
	
	/**
	 * Searches articles by tag, restricted to privacy=OPEN, sorted by
	 * createTime then id (both ascending), with summaries highlighted.
	 *
	 * <p>Results are written into {@code paging}; errors are logged, never
	 * propagated.
	 *
	 * @param paging paging state; receives total hits and result list
	 * @param tag    tag text to match against the "tags" field
	 */
	public static void searchArticleByTag(Paging paging, String tag) {
		long beginTimer = System.currentTimeMillis();
		// try-with-resources closes the reader; the original also declared a
		// Directory local that was never assigned (dead code) — removed.
		try (IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(LuceneConstants.ARTICLE_INDEX)))) {
			IndexSearcher searcher = new IndexSearcher(reader);

			Query tagQuery = new QueryParser("tags", AnalyzerUtil.getAnalyzer()).parse(tag);

			BooleanQuery.Builder builder = new BooleanQuery.Builder();
			builder.add(tagQuery, Occur.MUST);
			builder.add(IntPoint.newSetQuery("privacy", EnumPrivacy.OPEN.getIndex()), Occur.MUST);
			// BUG FIX: the original built this boolean query (tag + privacy
			// filter) but then searched with the raw tag query, so the privacy
			// restriction was silently ignored. Search with the combined query.
			Query query = builder.build();

			// Sort by createTime asc, then id asc.
			Sort sort = new Sort(
					new SortField("createTime", SortField.Type.LONG, false),
					new SortField("id", SortField.Type.LONG, false));

			ScoreDoc lastSd = getLastScoreDoc(query, searcher, sort, paging.getPageNo(), paging.getPageCount());
			TopDocs topDocs = searcher.searchAfter(lastSd, query, paging.getPageCount(), sort);
			paging.setTotalCount(topDocs.totalHits);

			// Highlight matched terms in the summary.
			SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<FONT COLOR='RED'>", "</FONT>");
			Highlighter highlighter = new Highlighter(formatter, new QueryScorer(query));
			highlighter.setTextFragmenter(new SimpleFragmenter(100)); // max fragment length

			List<Post> posts = new ArrayList<Post>();
			for (ScoreDoc sd : topDocs.scoreDocs) {
				Document targetDoc = searcher.doc(sd.doc);
				Post post = ArticleDocument.parseDocument(targetDoc);
				String rawSummary = targetDoc.get("summary");
				if (rawSummary != null) { // guard: original NPE'd on a missing summary field
					// BUG FIX: analyze under the real field name "summary" (was "")
					// and close the TokenStream (was leaked).
					try (TokenStream tokenStream = AnalyzerUtil.getAnalyzer().tokenStream("summary", new StringReader(rawSummary))) {
						String highlighted = highlighter.getBestFragment(tokenStream, rawSummary);
						if (highlighted != null) {
							// Only replace when a fragment was produced; the
							// original overwrote the summary with null otherwise.
							post.setSummary(highlighted);
						}
					}
				}
				posts.add(post);
			}
			paging.setResults(posts);
		} catch (Exception e) {
			logger.error(RequestThreadLocal.getTimer() + " Error", e);
		} finally {
			// Total time spent searching.
			logger.info(RequestThreadLocal.getTimer() + " cost " + (System.currentTimeMillis() - beginTimer));
		}
	}

	/**
	 * Removes a post's document from the article index by its {@code id} term.
	 *
	 * <p>Opens the index in APPEND mode; errors are logged, never propagated.
	 *
	 * @param id id of the post whose index entry should be deleted
	 */
	public static void deleteIndex(Long id) {
		if (!Files.isReadable(docDir)) {
			// NOTE(review): checks INDEX_ROOT readability but writes to ARTICLE_INDEX — confirm intent.
			logger.error(" Path does not exist or is not readable " + docDir.toAbsolutePath());
			// Do NOT System.exit(1): a library method must not kill the JVM.
			return;
		}
		logger.info("deleteIndex to directory  " + docDir.toAbsolutePath());
		long beginTimer = System.currentTimeMillis();
		// try-with-resources closes both the writer and the underlying
		// directory (the original leaked the Directory).
		try (Directory dir = FSDirectory.open(Paths.get(LuceneConstants.ARTICLE_INDEX));
				IndexWriter writer = new IndexWriter(dir,
						new IndexWriterConfig(AnalyzerUtil.getAnalyzer()).setOpenMode(OpenMode.APPEND))) {
			writer.deleteDocuments(new Term("id", String.valueOf(id)));
			logger.info("deleteIndex cost " + (System.currentTimeMillis() - beginTimer));
		} catch (Exception e) {
			logger.error(" Error ", e);
		}
	}
}
