package s;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.lucene.index.AtomicReader
import org.apache.lucene.index.DocsAndPositionsEnum
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.search.DocIdSetIterator;
//import org.apache.lucene.index.TermFreqVector;


/**
 * Collects the highest-frequency terms of the contents field from a Lucene
 * index, printing per-term and per-document diagnostics along the way.
 *
 * NOTE(review): written against the Lucene 4.x atomic-reader API
 * (AtomicReader / DocsAndPositionsEnum).
 */
class HighFreqTerms {

	// Atomic (single-segment) view of the index being inspected.
	private final AtomicReader ar
	// Requested number of high-frequency words, kept from construction.
	// getHighFrqTerms takes its own count parameter, so this is informational.
	private final int highFreqWordsTotal

	/**
	 * @param r atomic reader over the index to inspect
	 * @param n number of high-frequency words of interest
	 */
	HighFreqTerms(final AtomicReader r, int n) {
		ar = r

		System.out.println("ir is " + ar.toString())
		highFreqWordsTotal = n
		print ("total " + n + " total docs " + ar.numDocs())
	}

	/**
	 * Walks every term of the contents field, printing total/doc frequency and
	 * the positions of each occurrence, then returns up to numbTerms terms
	 * ordered from the most frequent to the least (by total term frequency).
	 *
	 * @param numbTerms maximum number of terms to return
	 * @return term strings, highest total frequency first; empty list when the
	 *         field is absent or the index holds no terms
	 */
	public List getHighFrqTerms(int numbTerms){
		Fields f = ar.fields()
		Terms t = f.terms(Constants.FIELD_CONTENTS)
		if (t == null) {
			// Field not present in this index — nothing to report.
			return []
		}
		TermsEnum te = t.iterator(null)

		BytesRef term
		def termMap = [:]
		while ((term = te.next()) != null) {
			String word = te.term().utf8ToString()
			println (" term " + word + " total freq " + te.totalTermFreq())
			termMap[word] = te.totalTermFreq()
			println " term " + word + " doc freq " + te.docFreq()

			DocsAndPositionsEnum de = te.docsAndPositions(null, null)
			if (de == null) {
				// Positions were not indexed for this term; skip the
				// per-document diagnostics rather than NPE.
				continue
			}
			int docid
			while ((docid = de.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
				// Hoisted: freq() is fixed for the current doc, so do not
				// re-query it on every loop-condition evaluation.
				int freq = de.freq()
				println "docid " + docid + " fre1 " + freq
				for (int i = 0; i < freq; i++) {
					println " de.nextpos " + de.nextPosition()
					println " "
				}
			}
		}

		// Sort ascending by total frequency; the most frequent terms end up at
		// the tail of the list.
		def wordList = termMap.keySet().toList()
		wordList.sort { termMap[it] }

		int m = Math.min(wordList.size(), numbTerms)
		if (m == 0) {
			// Guard: a negative-range slice (e.g. [-1..0]) on an empty list
			// would throw IndexOutOfBoundsException.
			return []
		}
		// Slice the last m entries in reverse order so the highest-frequency
		// term comes first.
		def top = wordList[-1..-m]
		top.each { word ->
			println "top3 " + word + " : " + termMap[word]
		}
		return top
	}
}
