package dyyx;

import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;

import dyyx.domain.SearchResult;
import dyyx.domain.WordFrequency;

/**
 * Static utility for querying a Lucene full-text index built over web documents,
 * plus helpers for Chinese word-frequency analysis via {@link SmartChineseAnalyzer}.
 *
 * <p>Thread-safety: the shared {@link IndexSearcher} and {@link Analyzer} are
 * immutable after class initialization and safe for concurrent use.
 */
public class LuceneUtil {

	/** Field name used when tokenizing free text for word-frequency analysis. */
	private static final String FIELD_NAME = "default";

	/** Shared analyzer for query parsing; intentionally never closed (lives for the JVM lifetime). */
	private static final Analyzer analyzer = new SmartChineseAnalyzer();

	private static final String WEB_INDEX_PATH = CommConst.WEB_INDEX_DIR;

	/**
	 * JVM-lifetime singleton searcher over the on-disk index. Class initialization
	 * is inherently thread-safe, so no lazy-init locking is needed; the underlying
	 * IndexReader is deliberately kept open for the life of the process.
	 */
	private static final IndexSearcher searcher = getIndexSearcher();

	/**
	 * Opens the on-disk index at {@code WEB_INDEX_PATH} and builds a searcher over it.
	 * Called exactly once, from the static initializer above.
	 *
	 * @return a new {@link IndexSearcher} over the web index
	 * @throws RuntimeException wrapping any failure to open the index (cause preserved)
	 */
	private static IndexSearcher getIndexSearcher() {
		try {
			IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(WEB_INDEX_PATH)));
			return new IndexSearcher(reader);
		} catch (Throwable e) {
			throw new RuntimeException("getIndexSearcher error", e);
		}
	}

	/**
	 * Loads a stored document by its internal Lucene docId.
	 *
	 * @param id internal Lucene document id (e.g. taken from a prior search hit)
	 * @return map with keys "id", "path" and "content"
	 * @throws Exception if the document cannot be read from the index
	 */
	public static Map<String, Object> getById(int id) throws Exception {
		Document doc = searcher.doc(id);

		Map<String, Object> map = new HashMap<String, Object>();
		map.put("id", id);
		map.put("path", doc.get("path"));
		map.put("content", doc.get("content"));
		return map;
	}

	/**
	 * Looks up the single document whose "path" field exactly matches the given value.
	 *
	 * @param path exact value of the indexed "path" term
	 * @return map with keys "id", "path", "score" and "content",
	 *         or {@code null} when no document matches
	 * @throws Exception on index access failure
	 */
	public static Map<String, Object> getByPath(String path) throws Exception {
		// TermQuery bypasses the analyzer: the path must match the stored term byte-for-byte.
		Query query = new TermQuery(new Term("path", path));

		TopDocs results = searcher.search(query, 1);
		ScoreDoc[] hits = results.scoreDocs;

		if (hits == null || hits.length <= 0) {
			return null;
		}

		ScoreDoc sdoc = hits[0];
		int id = sdoc.doc;
		Document doc = searcher.doc(id);

		Map<String, Object> map = new HashMap<String, Object>();
		map.put("id", id);
		map.put("path", doc.get("path"));
		map.put("score", sdoc.score);
		map.put("content", doc.get("content"));
		return map;
	}

	/**
	 * Full-text search over the "content" field.
	 *
	 * @param q       query string in Lucene QueryParser syntax
	 * @param maxRows maximum number of hits to return
	 * @return result holder with the hit count and one map ("id", "path", "score") per hit;
	 *         document content is NOT loaded here — fetch it via {@link #getById(int)}
	 * @throws Exception on query parse or search failure
	 */
	public static SearchResult doSearch(String q, int maxRows) throws Exception {
		QueryParser parser = new QueryParser("content", analyzer);
		Query query = parser.parse(q);

		TopDocs results = searcher.search(query, maxRows);
		ScoreDoc[] hits = results.scoreDocs;

		SearchResult sr = new SearchResult();
		sr.num = hits.length;

		for (ScoreDoc item : hits) {
			Document doc = searcher.doc(item.doc);

			Map<String, Object> map = new HashMap<String, Object>();
			map.put("id", item.doc);
			map.put("path", doc.get("path"));
			map.put("score", item.score);
			sr.list.add(map);
		}

		return sr;
	}

	/**
	 * Tokenizes {@code text} with a SmartChineseAnalyzer and counts occurrences of
	 * each token that is STRICTLY LONGER than {@code minLength}. Note the boundary:
	 * a token of exactly {@code minLength} characters is skipped — this matches the
	 * original behavior and is kept for backward compatibility with existing callers.
	 *
	 * @param text      text to analyze; {@code null} or empty yields {@code null}
	 * @param minLength tokens of this length or shorter are ignored; values &lt;= 0 are treated as 1
	 * @return token -&gt; occurrence-count map, or {@code null} for empty input
	 * @throws Exception on tokenization failure
	 */
	public static Map<String, AtomicLong> analyzeWordFrequency(String text, int minLength) throws Exception {
		if (text == null || text.isEmpty()) {
			return null;
		}
		if (minLength <= 0) {
			minLength = 1;
		}
		Map<String, AtomicLong> map = new HashMap<String, AtomicLong>();
		// try-with-resources: the original leaked the TokenStream entirely and leaked
		// the analyzer on any exception; both are Closeable.
		try (SmartChineseAnalyzer localAnalyzer = new SmartChineseAnalyzer();
				TokenStream tokenStream = localAnalyzer.tokenStream(FIELD_NAME, text)) {
			// Fetch the attribute once before reset(); it is reused across tokens.
			CharTermAttribute charTermAttribute = tokenStream.addAttribute(CharTermAttribute.class);
			tokenStream.reset();
			while (tokenStream.incrementToken()) {
				String word = charTermAttribute.toString();
				if (word == null || word.length() <= minLength) {
					continue;
				}
				AtomicLong al = map.get(word);
				if (al == null) {
					al = new AtomicLong(0);
					map.put(word, al);
				}
				al.incrementAndGet();
			}
			// Required by the TokenStream contract before close().
			tokenStream.end();
		}
		return map;
	}

	/**
	 * Converts a word -&gt; count map into a sorted list of {@link WordFrequency}
	 * entries (sort order defined by WordFrequency's natural ordering).
	 *
	 * @param map word -&gt; count map, typically from {@link #analyzeWordFrequency}
	 * @return sorted list, or {@code null} when the map is null or empty
	 *         (kept null — existing callers test for it)
	 */
	public static List<WordFrequency> buildWordFrequencys(Map<String, AtomicLong> map) {
		if (map == null || map.isEmpty()) {
			return null;
		}
		// Presize: the result has exactly one entry per distinct word.
		List<WordFrequency> list = new ArrayList<WordFrequency>(map.size());
		for (Map.Entry<String, AtomicLong> item : map.entrySet()) {
			WordFrequency wf = new WordFrequency();
			wf.word = item.getKey();
			wf.frequency = item.getValue().get();
			list.add(wf);
		}

		Collections.sort(list);

		return list;
	}

	/**
	 * Adds every count from {@code map2} into {@code map1} in place, creating
	 * entries in {@code map1} for words it does not yet contain.
	 * No-op when either map is null or {@code map2} is empty; {@code map2} is not modified.
	 *
	 * @param map1 accumulator map, mutated in place
	 * @param map2 source map whose counts are merged in
	 */
	public static void merge(Map<String, AtomicLong> map1, Map<String, AtomicLong> map2) {
		if (map1 == null || map2 == null || map2.isEmpty()) {
			return;
		}
		for (Map.Entry<String, AtomicLong> item : map2.entrySet()) {
			String word = item.getKey();
			AtomicLong frequency = map1.get(word);
			if (frequency == null) {
				frequency = new AtomicLong(0);
				map1.put(word, frequency);
			}
			frequency.getAndAdd(item.getValue().get());
		}
	}

	public static void main(String[] args) throws Exception {

	}
}
