package mahmoud.sigir.search;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;


import mahmoud.utils.MinHeap;
import mahmoud.utils.Result;
import mahmoud.utils.WieghtedTerm;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.search.TopDocs;

public class GroundTruthSearcher extends Searcher 
{

	/**
	 * Creates a searcher over the Lucene index at the given location.
	 *
	 * @param IndexLocation filesystem path of the Lucene index (forwarded to Searcher)
	 */
	public GroundTruthSearcher(String IndexLocation) 
	{
		super(IndexLocation);
	}

	/**
	 * Runs the weighted query through Lucene's own scorer and wraps the top
	 * {@code Searcher.TopK_DOCs} hits as {@code Result}s.
	 *
	 * @param Q weighted query terms, combined into a Lucene query by {@code constructQuery}
	 * @return min-heap of results; if the search throws, the error is logged and
	 *         results are built from a {@code null} TopDocs
	 */
	public MinHeap<Result> luceneSearch(ArrayList<WieghtedTerm> Q)
	{
		TopDocs hits = null;
		try 
		{
			hits = luceneSearcher.search(constructQuery(Q), Searcher.TopK_DOCs);
		} 
		catch (IOException e) 
		{
			e.printStackTrace();
			System.out.println("GroundTruth error while searching");
		}
		return constructResults(hits);
	}

	/**
	 * Scores every document in the index against the query with a hand-rolled
	 * TF-IDF: TF = termFreq / docLength, IDF = log10(N / (1 + docFreq)).
	 * Query term weights are summed for duplicates but only term membership
	 * (not the weight value) influences the score, matching the original logic.
	 *
	 * @param Q weighted query terms; duplicate terms have their weights merged
	 * @return heap (capacity 20) with one {@code Result} per document; score is 0
	 *         when no query term matches or the document has no stored term vector
	 */
	public MinHeap<Result> searchUsingLuceneReader(ArrayList<WieghtedTerm> Q)
	{
		MinHeap<Result> output = new MinHeap<Result>(20);
		// Collapse duplicate query terms, summing their weights.
		HashMap<String, WieghtedTerm> queryHM = new HashMap<String, WieghtedTerm>();
		for (WieghtedTerm wt : Q)
		{
			double newWieght = 0;
			if (queryHM.containsKey(wt.Term))
				newWieght = queryHM.get(wt.Term).wieght;
			queryHM.put(wt.Term, new WieghtedTerm(wt.Term, wt.wieght + newWieght));
		}

		try
		{
			for (int i = 0; i < luceneReader.maxDoc(); i++)
			{
				TermFreqVector TFV = luceneReader.getTermFreqVector(i, Field);
				Document doc = luceneReader.document(i);
				double score = 0;
				// BUG FIX: the original dereferenced TFV (getTerms/getTermFrequencies
				// and the DocLength loop) BEFORE its null check, so any document
				// without a stored term vector threw a NullPointerException.
				if (TFV != null)
				{
					String[] terms = TFV.getTerms();
					int[] frequencies = TFV.getTermFrequencies();
					float DocLength = 0;
					for (int f : frequencies)
						DocLength += f;
					// Guard against an empty vector: TF would otherwise be 0/0 = NaN.
					if (DocLength > 0)
					{
						for (int k = 0; k < frequencies.length; k++)
						{
							if (queryHM.containsKey(terms[k]))
							{
								double TF = frequencies[k] / DocLength;
								// BUG FIX: cast to double — the original computed
								// maxDoc()/(1+docFreq) with INTEGER division, truncating
								// the ratio before the log and distorting every IDF.
								double IDF = Math.log10((double) luceneReader.maxDoc()
										/ (1 + luceneReader.docFreq(new Term(Field, terms[k]))));
								score += TF * IDF;
							}
						}
					}
				}
				// Every document is emitted, even with score 0 (the original kept a
				// commented-out "if(score>0)" filter disabled on purpose).
				output.add(new Result(i, doc.get("docID"), doc.get("Title"), doc.get("All"), doc.get("filePath"), score));
			}
		}
		catch (Exception e)
		{
			// NOTE(review): exiting with status 0 on failure hides the error from
			// calling scripts — consider rethrowing or System.exit(1). Behavior kept.
			e.printStackTrace();
			System.out.println("what happened?");
			System.exit(0);
		}
		return output;
	}

	/**
	 * Smoke test: searches the OHSUMED index for three unit-weight terms via
	 * {@link #luceneSearch} and prints the hits plus elapsed wall-clock time.
	 */
	public static void main(String[] argc)
	{
		System.out.println("from GTsearcher");
		GroundTruthSearcher s = new GroundTruthSearcher(Searcher.OhsumedIndexLocation);
		s.openSearcher();
		ArrayList<WieghtedTerm> Q = new ArrayList<WieghtedTerm>();
		Q.add(new WieghtedTerm("breast", 1));
		Q.add(new WieghtedTerm("prostate", 1));
		Q.add(new WieghtedTerm("cancer", 1));
		long start = System.currentTimeMillis();
		System.out.print(s.luceneSearch(Q));
		long end = System.currentTimeMillis();
		System.out.println("Evaluation Took: " + (end - start) / 1000.0 + " Seconds.");
		s.closeSearcher();
	}

}
