package com.yihaodian.search.highlight;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.ChineseAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.vectorhighlight.FastVectorHighlighter;
import org.apache.lucene.search.vectorhighlight.FieldQuery;
import org.apache.lucene.search.vectorhighlight.FragListBuilder;
import org.apache.lucene.search.vectorhighlight.SimpleFragListBuilder;
import org.apache.lucene.search.vectorhighlight.SimpleFragmentsBuilder;

import com.yihaodian.api.search.SearchException;
import com.yihaodian.api.search.grouping.GroupingResult;
import com.yihaodian.api.search.highlight.HighlightSpec;
import com.yihaodian.api.search.misc.SearchHit;
import com.yihaodian.search.SearchContext;

public class HighlightHandler {
	private static final Logger logger = Logger.getLogger(HighlightHandler.class);

	/**
	 * Highlights every hit in the response according to the request's
	 * highlight specs. A no-op when there are no specs or no hits.
	 *
	 * @param context search context carrying the searcher, query, request and response
	 * @throws SearchException if highlighting any hit fails
	 */
	public static void highlight(SearchContext context) throws SearchException {
		try {
			IndexReader reader = context.getSearcher().reader();
			Query query = context.getQuery();
			HighlightSpec[] highlightSpecs = context.getShardRequest().getSearchRequest()
					.getHighlightSpecs();
			SearchHit[] hits = context.getResponse().getHits();
			// Nothing to highlight without both specs and hits.
			if (highlightSpecs == null || highlightSpecs.length == 0
					|| hits == null) {
				return;
			}
			for (SearchHit hit : hits) {
				highlight(reader, hit, query, highlightSpecs);
			}
		} catch (SearchException e) {
			// Already our exception type; log and rethrow without double-wrapping.
			logger.warn("Highlight error!", e);
			throw e;
		} catch (Exception e) {
			logger.warn("Highlight error!", e);
			throw new SearchException(e);
		}
	}

	/**
	 * Highlights the hits in groups.
	 * Fetches the hits of each group and highlights them one by one.
	 *
	 * @param context search context carrying the searcher, query, request and response
	 * @throws SearchException if highlighting any hit fails
	 */
	public static void highlightGrouping(SearchContext context) throws SearchException {
		try {
			IndexReader reader = context.getSearcher().reader();
			Query query = context.getQuery();
			HighlightSpec[] highlightSpecs = context.getShardRequest().getSearchRequest()
					.getHighlightSpecs();

			GroupingResult[] groups = context.getResponse().getGroupingResult();
			// Return if no highlight spec or grouping information
			if (groups == null || groups.length == 0
					|| highlightSpecs == null || highlightSpecs.length == 0) {
				return;
			}

			for (GroupingResult group : groups) {
				SearchHit[] hits = group.getHits();
				if (hits == null) {
					continue;
				}
				for (SearchHit hit : hits) {
					highlight(reader, hit, query, highlightSpecs);
				}
			}
		} catch (SearchException e) {
			// Already our exception type; log and rethrow without double-wrapping.
			logger.warn("Highlight error!", e);
			throw e;
		} catch (Exception e) {
			logger.warn("Highlight error!", e);
			throw new SearchException(e);
		}
	}

	/**
	 * Highlights a single hit for every requested field and stores the
	 * resulting fragments on the hit via {@code setHighlightValues}.
	 *
	 * @param reader         index reader used to load stored field values
	 * @param hit            hit to decorate with highlight fragments
	 * @param query          query whose terms are highlighted
	 * @param highlightSpecs per-field highlighting configuration
	 * @throws SearchException if fragment extraction fails for any field
	 */
	public static void highlight(IndexReader reader, SearchHit hit,
			Query query, HighlightSpec[] highlightSpecs) throws SearchException {
		Map<String, String[]> highlightMap = new HashMap<String, String[]>();
		FragListBuilder fragListBuilder = new SimpleFragListBuilder();
		SimpleFragmentsBuilder fragmentsBuilder = new SimpleFragmentsBuilder(
				new String[]{"<span style=\"color: red;\">"},
				new String[]{"</span>"});
		FastVectorHighlighter highlighter = new FastVectorHighlighter(true, false,
				fragListBuilder, fragmentsBuilder);
		try {
			for (HighlightSpec spec : highlightSpecs) {
				String[] fragments;
				if (spec.getHighlighterType() == HighlightSpec.TYPE_VECT_HIGHLIGHT) {
					// Fast path: uses stored term vectors, no re-analysis needed.
					fragments = getBestFragments(reader, hit.getDocId(), query,
							highlighter, spec.getField(),
							spec.getFragmentCharSize(),
							spec.getNumberOfFragments());
				} else {
					// Fallback path: re-analyzes the stored field value.
					fragments = getBestFragmentsOld(reader, hit.getDocId(),
							query, highlighter, spec.getField(),
							spec.getFragmentCharSize(),
							spec.getNumberOfFragments());
				}
				highlightMap.put(spec.getField(), fragments);
			}
		} catch (Exception e) {
			throw new SearchException(e);
		}

		hit.setHighlightValues(highlightMap);
	}

	/**
	 * Extracts fragments with the classic {@link Highlighter}, re-analyzing
	 * the stored field value with a {@link ChineseAnalyzer}.
	 *
	 * NOTE(review): {@code fragCharSize} is ignored on this path — the
	 * Highlighter runs with its default fragmenter size; confirm whether a
	 * SimpleFragmenter({@code fragCharSize}) should be installed here. Also
	 * note this path wraps matches in {@code <b>} tags while the vector path
	 * uses {@code <span>} tags — verify that callers expect the difference.
	 *
	 * @param reader          index reader used to load the stored document
	 * @param docId           Lucene document id of the hit
	 * @param query           query whose terms are highlighted
	 * @param highlighter     unused on this path (kept for signature parity)
	 * @param fieldName       stored field to highlight
	 * @param fragCharSize    requested fragment size (currently unused, see note)
	 * @param maxNumFragments maximum number of fragments to return
	 * @return highlighted fragments; empty when the field is missing or not stored
	 * @throws IOException                  on index access failure
	 * @throws InvalidTokenOffsetsException if token offsets exceed the text length
	 */
	public static String[] getBestFragmentsOld(IndexReader reader, int docId,
			Query query, FastVectorHighlighter highlighter, String fieldName,
			int fragCharSize, int maxNumFragments) throws IOException,
			InvalidTokenOffsetsException {
		Document doc = reader.document(docId);
		// Guard against missing/unstored fields: the unguarded doc.get() would
		// otherwise cause an NPE inside StringReader/getBestFragments.
		String fieldValue = doc.get(fieldName);
		if (fieldValue == null) {
			return new String[0];
		}
		SimpleHTMLFormatter htmlFormat = new SimpleHTMLFormatter("<b>", "</b>");
		QueryScorer scorerContent = new QueryScorer(query);
		Highlighter highLightContent = new Highlighter(htmlFormat,
				scorerContent);

		/**
		 * TODO: Get analyzer from request.
		 */
		Analyzer analyzer = new ChineseAnalyzer();
		TokenStream tokenStreamContent = analyzer.tokenStream(fieldName,
				new StringReader(fieldValue));
		return highLightContent.getBestFragments(tokenStreamContent,
				fieldValue, maxNumFragments);
	}

	/**
	 * Extracts fragments with the {@link FastVectorHighlighter}. The field is
	 * expected to be indexed with term vectors (positions and offsets);
	 * presumably it may return {@code null} when the query does not match the
	 * field — callers store the result as-is.
	 *
	 * @param reader          index reader the hit belongs to
	 * @param docId           Lucene document id of the hit
	 * @param query           query whose terms are highlighted
	 * @param highlighter     configured fast-vector highlighter instance
	 * @param fieldName       field to highlight
	 * @param fragCharSize    length of each fragment in characters
	 * @param maxNumFragments maximum number of fragments to return
	 * @return highlighted fragments for the field
	 * @throws IOException on index access failure
	 */
	public static String[] getBestFragments(IndexReader reader, int docId,
			Query query, FastVectorHighlighter highlighter, String fieldName,
			int fragCharSize, int maxNumFragments) throws IOException {

		FieldQuery fieldQuery = highlighter.getFieldQuery(query);
		return highlighter.getBestFragments(fieldQuery, reader, docId, fieldName,
				fragCharSize, maxNumFragments);
	}
}
