
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
import org.apache.lucene.index.TermsEnum;

/**
 * Relevance feedback: given the Lucene doc ids the user marked as relevant,
 * aggregates term frequencies from those documents' stored term vectors and
 * returns the top-10 most frequent terms as a space-separated query string.
 *
 * NOTE(review): requires the index at "index-directory" to have been built
 * with term vectors enabled — docs without term vectors are skipped.
 */
public class RelevantFeedBack {

	/**
	 * Builds a feedback query string from the given relevant documents.
	 *
	 * @param relevantDocs Lucene document ids judged relevant by the user
	 * @return up to 10 highest-frequency terms, each preceded by a space
	 *         (e.g. " termA termB ..."), possibly empty
	 * @throws IOException on index access failure
	 */
	public String performRelevantFeedBack(List<Integer> relevantDocs) throws IOException{
		FSDirectory idx = FSDirectory.open(new File("index-directory"));
		DirectoryReader ireader = DirectoryReader.open(idx);
		try {
			Bits liveDocs = MultiFields.getLiveDocs(ireader);

			// term -> summed within-document frequency across all relevant docs
			TreeMap<String, Integer> termTree = new TreeMap<String, Integer>();

			Iterator<Integer> itr = relevantDocs.iterator();
			while (itr.hasNext()) { // loop over relevant docs
				Fields fields = ireader.getTermVectors(itr.next());
				if (fields == null) {
					continue; // this doc stored no term vectors — skip (was an NPE)
				}
				Iterator<String> it = fields.iterator();
				while (it.hasNext()) { // loop over fields
					String fieldName = it.next();
					Terms terms = fields.terms(fieldName);
					TermsEnum termsEnum = terms.iterator(null);
					while (termsEnum.next() != null) { // loop over terms
						String term = termsEnum.term().utf8ToString();
						DocsEnum docsEnum = termsEnum.docs(liveDocs, null);
						// A term-vector enum contains exactly one document, so a
						// single nextDoc() positions us on it; freq() is the
						// term's frequency within this document.
						docsEnum.nextDoc();
						int freq = docsEnum.freq();
						Integer prev = termTree.get(term);
						termTree.put(term, prev == null ? freq : prev + freq);
					}
				}
			}

			// Pick the top-10 terms by aggregated frequency.
			TreeSet<Map.Entry<String, Integer>> termSet = entriesSortedByValues(termTree);
			StringBuilder newKeyword = new StringBuilder();
			int count = 0;
			Iterator<Map.Entry<String, Integer>> setIter = termSet.iterator();
			while (setIter.hasNext() && count < 10) { // was "count <= 10": off-by-one took 11 terms
				newKeyword.append(' ').append(setIter.next().getKey());
				count++;
			}
			System.out.println("Feedback................" + newKeyword);
			return newKeyword.toString();
		} finally {
			// Reader and directory were previously leaked on every call.
			ireader.close();
			idx.close();
		}
	}

	/**
	 * Returns the map's entries ordered by descending value (frequency),
	 * ties broken by ascending key so equal-frequency terms are all kept.
	 *
	 * Bug fix: the previous comparator compared values only, so TreeSet
	 * treated equal-frequency entries as duplicates and silently dropped
	 * all but one of them.
	 *
	 * @param map term -> frequency
	 * @return entries sorted by frequency desc, then term asc
	 */
	TreeSet<Map.Entry<String, Integer>> entriesSortedByValues(Map<String,Integer> map) {
		TreeSet<Map.Entry<String, Integer>> sortedEntries = new TreeSet<Map.Entry<String, Integer>>(
	        new Comparator<Map.Entry<String, Integer>>() {
	            public int compare(Map.Entry<String, Integer> e1, Map.Entry<String, Integer> e2) {
	                int byValue = e2.getValue().compareTo(e1.getValue());
	                // Tie-break on key: a comparator returning 0 here would make
	                // TreeSet discard one of the two entries.
	                return byValue != 0 ? byValue : e1.getKey().compareTo(e2.getKey());
	            }
	        }
	    );
	    sortedEntries.addAll(map.entrySet());
	    return sortedEntries;
	}
}
