package com.cp.service;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Version;
import org.wltea.analyzer.core.IKSegmenter;
import org.wltea.analyzer.core.Lexeme;
import org.wltea.analyzer.lucene.IKAnalyzer;

import java.io.IOException;
import java.io.StringReader;

public class LuceneQuery {

	/**
	 * Fuzzy (parsed) search: runs {@code keyWord} through a classic
	 * {@link QueryParser} bound to a single field.
	 *
	 * @param keyWord raw query string (may contain Lucene query syntax)
	 * @param v       Lucene compatibility version
	 * @param field   default field to search
	 * @param a       analyzer used to tokenize the query text
	 * @return the parsed {@link Query}, or {@code null} if the keyword could
	 *         not be parsed (callers must handle the null case)
	 */
	public static Query getQueryParser(String keyWord, Version v, String field, Analyzer a) {
		QueryParser parser = new QueryParser(v, field, a);
		try {
			return parser.parse(keyWord);
		} catch (ParseException e) {
			// Best-effort contract: report the parse failure and signal it to
			// the caller with a null return instead of propagating the exception.
			e.printStackTrace();
			return null;
		}
	}

	/**
	 * Exact-match query on an integer field (range [xfId, xfId], both ends
	 * inclusive).
	 *
	 * <p>Bug fix: the {@code field} parameter was previously ignored and the
	 * field name {@code "xfId"} was hard-coded, so searches on any other
	 * integer field silently queried the wrong field.
	 *
	 * @param xfId  the exact integer value to match
	 * @param field the indexed numeric field name
	 * @return a numeric range query matching exactly {@code xfId}
	 */
	public static Query getIntQuery(int xfId, String field) {
		return NumericRangeQuery.newIntRange(field, xfId, xfId, true, true);
	}

	/**
	 * Multi-field search over title and/or content using the IK analyzer in
	 * smart-segmentation mode.
	 *
	 * @param keyWord the query text
	 * @param type    field selector: "1" = title only, "2" = content only,
	 *                anything else = both title and content
	 * @return the parsed multi-field query
	 * @throws ParseException if {@code keyWord} cannot be parsed
	 *                        (narrowed from the original {@code throws Exception};
	 *                        source-compatible for existing callers)
	 */
	public static Query multiFieldQuery(String keyWord, String type) throws ParseException {
		final String[] fields;
		if ("1".equals(type)) {
			fields = new String[] {"title"};
		} else if ("2".equals(type)) {
			fields = new String[] {"content"};
		} else {
			fields = new String[] {"title", "content"};
		}
		// true => IK smart mode (coarser-grained segmentation).
		MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_46, fields, new IKAnalyzer(true));
		parser.setPhraseSlop(0);
		return parser.parse(keyWord);
	}

	/**
	 * Exact (term) search: segments {@code keyWord} with IK, keeps the longest
	 * token of length &gt; 1, and builds a {@link TermQuery} for it.
	 *
	 * @param field   the indexed field to match against
	 * @param keyWord the raw keyword; common CJK/ASCII punctuation is stripped
	 *                before segmentation
	 * @return a {@link TermQuery} on the longest multi-character token, or
	 *         {@code null} if no such token exists (callers must handle null)
	 * @throws IOException if the segmenter fails reading the keyword
	 */
	public static Query getTermQuery(String field, String keyWord) throws IOException {
		// Strip separators/punctuation so they don't pollute segmentation.
		keyWord = keyWord.replaceAll("[、,;，；\\(\\)]", "");
		// true => smart (coarse-grained) segmentation.
		IKSegmenter ikSeg = new IKSegmenter(new StringReader(keyWord), true);
		Lexeme lexeme;
		String maxFenci = "";

		// Keep the longest token longer than one character; single-character
		// tokens are too noisy to be useful as exact-match terms.
		while ((lexeme = ikSeg.next()) != null) {
			String text = lexeme.getLexemeText();
			if (text.length() > 1 && text.length() > maxFenci.length()) {
				maxFenci = text;
			}
		}
		if (maxFenci.isEmpty()) {
			return null;
		}
		return new TermQuery(new Term(field, maxFenci));
	}

	/**
	 * Phrase search: splits {@code keyWord} into individual characters and
	 * requires them to occur within {@code slop} positions of each other.
	 *
	 * @param field   the indexed field to match against
	 * @param keyWord the phrase text (searched character by character)
	 * @param slop    maximum edit distance between phrase terms
	 * @return the assembled {@link PhraseQuery}
	 */
	public static Query getPhraseQuery(String field, String keyWord, int slop) {
		PhraseQuery query = new PhraseQuery();
		query.setSlop(slop);
		// split("") yields one element per character; on Java 7 and earlier it
		// also produces a leading empty string, which the filter below skips.
		String[] chars = keyWord.split("");
		for (String ch : chars) {
			if (!"".equals(ch)) {
				query.add(new Term(field, ch));
			}
		}
		return query;
	}

	/**
	 * Fuzzy (edit-distance) search on a single term with Lucene's default
	 * maximum edit distance.
	 *
	 * @param field   the indexed field to match against
	 * @param keyWord the term to fuzzily match
	 * @return a {@link FuzzyQuery} for the given term
	 */
	public static Query getFuzzyQuery(String field, String keyWord) {
		return new FuzzyQuery(new Term(field, keyWord));
	}
}
