package com.ossean.match.lucene;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.queryparser.classic.QueryParser.Operator;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wltea.analyzer.lucene.IKAnalyzer;

import com.ossean.match.matchprocess.MatchIncrement;
import com.ossean.match.matchprocess.MemoInfos;
import com.ossean.match.utils.Normalizer;

public class LuceneSearch {
	private static Logger logger = LoggerFactory.getLogger(LuceneSearch.class);
	/**
	 * Matches memo key words to projects via Lucene: every term in
	 * {@code keyWordsList} is OR-ed into one boolean query against
	 * {@code searchField}; each hit whose stored name occurs verbatim in
	 * {@code keyWords} contributes {@code weight + score/1000} to its
	 * project's accumulated weight.
	 *
	 * @param keyWords       raw key-word string a hit's stored name must occur in
	 * @param keyWordsList   individual terms to search for
	 * @param searchField    index field the terms are matched against
	 * @param weight         base weight added per accepted hit
	 * @param matchMap       project id -&gt; accumulated weight; mutated in place
	 * @param prjIndexReader reader over the project index
	 * @return the (mutated) matchMap, for call chaining
	 */
	public static HashMap<Integer, Double> memoToPrjMatchByLucene(String keyWords,
			List<String> keyWordsList, String searchField, double weight,
			HashMap<Integer, Double> matchMap, IndexReader prjIndexReader) {
		try {
			IndexSearcher is = new IndexSearcher(prjIndexReader);
			BooleanQuery query = new BooleanQuery();
			// Custom similarity: reward documents matching many query terms
			// (coord grows with overlap^3) and disable query normalization so
			// raw scores stay comparable across different queries.
			Similarity similarity = new DefaultSimilarity() {
				@Override
				public float coord(int overlap, int maxOverlap) {
					return overlap * overlap * overlap / (float) maxOverlap;
				}
				@Override
				public float queryNorm(float sumOfSquaredWeights) {
					return 1.0f;
				}
			};
			is.setSimilarity(similarity);

			for (String keyWordsTerm : keyWordsList) {
				query.add(new TermQuery(new Term(searchField, keyWordsTerm)),
						BooleanClause.Occur.SHOULD);
			}
			TopDocs td = is.search(query, 10000);
			for (ScoreDoc sd : td.scoreDocs) {
				Document d = is.doc(sd.doc);
				String prjId = d.get(LuceneIndex.prjIdFieldName);
				if (prjId == null) {
					continue; // document has no id field; nothing to accumulate
				}
				// A document may store several names; each verbatim occurrence
				// in keyWords contributes once, matching the original behavior.
				for (String prjName : d.getValues(searchField)) {
					if (keyWords.contains(prjName)) {
						int pId = Integer.parseInt(prjId);
						double add = weight + sd.score / 1000;
						Double prev = matchMap.get(pId);
						matchMap.put(pId, prev == null ? add : prev + add);
					}
				}
			}
		} catch (IOException e) {
			// Pass the throwable itself so the full stack trace is logged.
			logger.error("memoToPrjMatchByLucene IOException", e);
		}
		return matchMap;
	}

	/**
	 * Matches a project name to memos via an exact term query. Hits scoring
	 * at least 0.7 are turned into a {@link MemoInfos} (experience, city,
	 * salary range) whose weight accumulates {@code weight + score/1000}
	 * on top of any weight already stored for that memo id.
	 *
	 * @param prjName         project name used as the exact search term
	 * @param searchField     memo index field the term is matched against
	 * @param weight          base weight added per accepted hit
	 * @param map             memo id -&gt; MemoInfos; mutated in place
	 * @param memoIndexReader reader over the memo index
	 * @param prjIndexReader  unused here; kept for caller compatibility
	 * @param matchIncrement  supplies the city list used by getMemoInfos
	 * @return the (mutated) map, for call chaining
	 */
	public static HashMap<Integer, MemoInfos> prjToMemoMatchByLucene(
			String prjName, String searchField, double weight,
			HashMap<Integer, MemoInfos> map, IndexReader memoIndexReader, IndexReader prjIndexReader, MatchIncrement matchIncrement) {

		try {
			IndexSearcher is = new IndexSearcher(memoIndexReader);
			Query query = new TermQuery(new Term(searchField, prjName));
			TopDocs td = is.search(query, 1000000);
			for (ScoreDoc sd : td.scoreDocs) {
				// Keep only reasonably confident hits.
				if (sd.score < 0.7) {
					continue;
				}
				Document d = is.doc(sd.doc);
				String postId = d.get(LuceneIndex.memoIdFieldName);
				int pId = Integer.parseInt(postId);
				MemoInfos memoInfos = getMemoInfos(
						d.get(LuceneIndex.experienceFieldName),
						d.get(LuceneIndex.addrFieldName),
						d.get(LuceneIndex.salaryFieldName), matchIncrement);
				// BUG FIX: accumulate the weight already stored in the map for
				// this memo. The original read the weight of the freshly built
				// memoInfos (always its default), silently discarding the
				// weight gathered by earlier matches.
				double prevWeight = map.containsKey(pId) ? map.get(pId).getWeight() : 0.0;
				memoInfos.setWeight(prevWeight + weight + sd.score / 1000);
				map.put(pId, memoInfos);
			}
		} catch (IOException e) {
			// Pass the throwable itself so the full stack trace is logged.
			logger.error("prjToMemoMatchByLucene IOException", e);
		}
		return map;
	}
	
	/**
	 * Builds a MemoInfos from the stored experience/address/salary strings.
	 * Normalizes the experience label to the "经验…" form, maps the address to
	 * a known city (朝阳 is folded into 北京), and parses the first two numbers
	 * found in the salary string as min/max salary.
	 *
	 * @param experience     raw experience label from the index (may be null)
	 * @param addr           raw address string from the index (may be null)
	 * @param salary         raw salary string from the index
	 * @param matchIncrement supplies the known-city list
	 * @return a populated MemoInfos (weight is left at its default)
	 */
	public static MemoInfos getMemoInfos(String experience, String addr, String salary, MatchIncrement matchIncrement){
		MemoInfos memoInfos = new MemoInfos();
		// Guard against a missing stored field; a null label falls through to
		// the "经验不限" (unrestricted) default below.
		String experienceString = experience == null ? "" : experience.replaceAll(" ", "");
		if (experienceString.equals("1-3年经验")) {
			experienceString = "经验1-3年";
		} else if (experienceString.equals("3-5年经验")) {
			experienceString = "经验3-5年";
		} else if (experienceString.equals("5-10年经验")) {
			experienceString = "经验5-10年";
		} else if (experienceString.equals("应届毕业生")) {
			experienceString = "经验应届毕业生";
		} else if (experienceString.equals("10年以上经验")) {
			experienceString = "经验10年以上";
		} else if (experienceString.equals("")) {
			experienceString = "经验不限";
		}
		memoInfos.setExperience(experienceString);
		if (addr != null) {
			List<String> cities = matchIncrement.getCityList();
			for (int i = 0; i < cities.size(); i++) {
				String city = cities.get(i);
				if (addr.contains(city)) {
					// BUG FIX: compare string content with equals(), not ==
					// (reference comparison almost never matches list data).
					if ("朝阳".equals(city)) {
						memoInfos.setCity("北京");
					} else {
						memoInfos.setCity(city);
					}
				}
			}
		}
		String salaryNum = Normalizer.StringFilter(salary);
		int[] salaries = new int[] { 0, 0 };
		int j = 0;
		for (String num : salaryNum.split(" ")) {
			if (num.trim().length() > 0) {
				salaries[j++] = Integer.parseInt(num);
				// BUG FIX: stop after the first two numbers. The original
				// 'continue' skipped j++ once j reached 1, so every later
				// number kept overwriting the maximum salary.
				if (j == 2) {
					break;
				}
			}
		}
		memoInfos.setMin_salary(salaries[0]);
		memoInfos.setMax_salary(salaries[1]);
		return memoInfos;
	}
	
	/**
	 * Runs an exact term query for {@code tagStr} on {@code searchField} and
	 * adds {@code weight + score/1000} to each hit's entry in {@code map}.
	 * Only EXISTING entries are updated; ids not already present are ignored
	 * (presumably they were seeded by an earlier matching pass — verify with
	 * callers before relying on this).
	 *
	 * @param tagStr      exact term to search for
	 * @param searchField index field the term is matched against
	 * @param idField     stored field holding the numeric id
	 * @param weight      base weight added per hit
	 * @param map         id -&gt; accumulated weight; mutated in place
	 * @param indexReader reader over the target index
	 * @return the (mutated) map, for call chaining
	 */
	public static HashMap<Integer, Double> searchByMemoContentList(String tagStr, String searchField, String idField, double weight,
			HashMap<Integer, Double> map, IndexReader indexReader) {

			try {
				IndexSearcher is = new IndexSearcher(indexReader);
				Query query = new TermQuery(new Term(searchField, tagStr));
				TopDocs td = is.search(query, 100000);
				for (ScoreDoc sd : td.scoreDocs) {
					Document d = is.doc(sd.doc);
					int pId = Integer.parseInt(d.get(idField));
					Double prev = map.get(pId);
					if (prev != null) {
						map.put(pId, prev + weight + sd.score / 1000);
					}
				}
			} catch (IOException e) {
				// BUG FIX: message previously named the wrong method
				// ("prjToMemoMatchByLucene"); also log the stack trace.
				logger.error("searchByMemoContentList IOException", e);
			}
			return map;
	}
	
	/**
	 * Runs an exact term query for {@code tagStr} on {@code searchField} and
	 * adds {@code weight + score/1000} to the weight of each hit's existing
	 * MemoInfos in {@code map}. Ids not already present are ignored
	 * (presumably seeded by an earlier pass — verify with callers).
	 *
	 * @param tagStr      exact term to search for
	 * @param searchField index field the term is matched against
	 * @param idField     stored field holding the numeric id
	 * @param weight      base weight added per hit
	 * @param map         id -&gt; MemoInfos; weights mutated in place
	 * @param indexReader reader over the target index
	 * @return the (mutated) map, for call chaining
	 */
	public static HashMap<Integer, MemoInfos> searchByPrjTag(String tagStr, String searchField, String idField, double weight,
			HashMap<Integer, MemoInfos> map, IndexReader indexReader) {

			try {
				IndexSearcher is = new IndexSearcher(indexReader);
				Query query = new TermQuery(new Term(searchField, tagStr));
				TopDocs td = is.search(query, 100000);
				for (ScoreDoc sd : td.scoreDocs) {
					Document d = is.doc(sd.doc);
					int pId = Integer.parseInt(d.get(idField));
					MemoInfos existing = map.get(pId);
					if (existing != null) {
						existing.setWeight(existing.getWeight() + weight + sd.score / 1000);
					}
				}
			} catch (IOException e) {
				// BUG FIX: message previously named the wrong method
				// ("prjToMemoMatchByLucene"); also log the stack trace.
				logger.error("searchByPrjTag IOException", e);
			}
			return map;
	}
	
	/**
	 * Counts, per post, how many of the given tags were matched. The tags
	 * string is parsed with the IK analyzer into a query; for every hit, the
	 * number of matched terms is extracted from the score explanation and
	 * summed per post id.
	 *
	 * @param idField     stored field holding the numeric post id
	 * @param tags        tag text parsed by the query parser
	 * @param searchField index field the parsed query targets
	 * @param indexReader reader over the target index
	 * @return post id -&gt; total number of matched tag terms
	 */
	public static HashMap<Integer, Integer> tagsMatch(String idField,
			String tags, String searchField, IndexReader indexReader) {
		HashMap<Integer, Integer> tagsMatchNum = new HashMap<Integer, Integer>();
		try {
			IndexSearcher searcher = new IndexSearcher(indexReader);
			QueryParser parser = new QueryParser(searchField, new IKAnalyzer(true));
			Query query = parser.parse(tags);
			TopDocs topDocs = searcher.search(query, 100000);
			for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
				Document doc = searcher.doc(scoreDoc.doc);
				int pId = Integer.parseInt(doc.get(idField));
				// The explanation's coord detail encodes how many query
				// terms hit this document.
				Explanation explanation = searcher.explain(query, scoreDoc.doc);
				int hitNum = getHitTermsNum(explanation);
				Integer prev = tagsMatchNum.get(pId);
				tagsMatchNum.put(pId, prev == null ? hitNum : prev + hitNum);
			}
		} catch (IOException e) {
			logger.error("tagsMatch IOException: " + e);
		} catch (ParseException e) {
			logger.error("tagsMatch ParseException: " + e);
		}
		return tagsMatchNum;
	}
	
	/**
	 * Extracts the number of query terms that hit a document from its score
	 * explanation, by parsing the last detail entry (the coord factor, whose
	 * text contains "(hits/total)"). Falls back to 1 when that entry does not
	 * look like a short coord string.
	 *
	 * @param explanation Lucene score explanation for one document
	 * @return the parsed hit-term count, or 1 when it cannot be parsed
	 */
	public static int getHitTermsNum(Explanation explanation) {
		Explanation[] details = explanation.getDetails();
		String coord = details[details.length - 1].toString();
		// Anything longer than 20 chars (or empty) is not a plain coord
		// string such as "0.5 = coord(2/4)"; treat it as a single hit.
		if (coord.length() > 20 || coord.length() <= 0) {
			return 1;
		}
		int open = coord.indexOf('(');
		int slash = coord.indexOf('/');
		return Integer.parseInt(coord.substring(open + 1, slash));
	}
	
}
