package mahmoud.sigir.search;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.search.Explanation.IDFExplanation;

import mahmoud.sigir.parse.OhsumedQueryParser;
import mahmoud.utils.MinHeap;
import mahmoud.utils.MaxHeap;
import mahmoud.utils.PorterStemAnalyzer;
import mahmoud.utils.QueryPair;
import mahmoud.utils.Result;
import mahmoud.utils.WieghtedTerm;

public class VantagePointSearcher extends Searcher {

	/** Cache of per-term IDF values so each term's docFreq is fetched from Lucene only once. */
	HashMap<String, Double> docIDFCache;
	/** Precomputed (docID, distance-to-vantage-point, vector magnitude) table, sorted by distance. */
	DistanceResult[] distanceArray;
	/** File holding the serialized distance table (tab-separated: docID, distance, magnitude). */
	String filename = "ohsumed_distance.dat";
	OhsumedQueryParser oqp;
	/** Analyzer used to tokenize/stem incoming queries (same stemmer as the index). */
	public Analyzer analyzer = null;

	/**
	 * Creates a searcher over the Lucene index at the given location.
	 *
	 * @param Location index path handed to the {@link Searcher} superclass
	 */
	public VantagePointSearcher(String Location) {
		super(Location);
		docIDFCache = new HashMap<String, Double>();
		analyzer = new PorterStemAnalyzer();
	}

	/** Not implemented for this searcher; always returns {@code null}. */
	@Override
	public MinHeap<Result> luceneSearch(ArrayList<WieghtedTerm> Q) {
		return null;
	}

	/**
	 * Returns the (cached) IDF of a term: log10(N / (1 + docFreq)).
	 * Non-finite results (empty ratio, log of 0) are clamped to 0.
	 * FIX: the original divided two ints before taking the log, truncating the
	 * ratio to an integer; the division is now done in floating point.
	 *
	 * @param term    index term text
	 * @param maxDocs total number of documents N in the index
	 * @throws IOException if Lucene fails to read the term's document frequency
	 */
	private double getIDF(String term, int maxDocs) throws IOException {
		Double cached = docIDFCache.get(term);
		if (cached != null)
			return cached;
		double idf = Math.log10((double) maxDocs / (1 + luceneReader.docFreq(new Term(Field, term))));
		if (Double.isInfinite(idf) || Double.isNaN(idf))
			idf = 0.0;
		docIDFCache.put(term, idf);
		return idf;
	}

	/**
	 * Builds the corpus centroid ("vantage point") vector by summing each
	 * document's length-normalized TF-IDF weight for every term it contains.
	 * Errors are logged and a (possibly partial) vector is returned, matching
	 * the original best-effort behavior.
	 */
	private HashMap<String, Double> getCorpusCenteroid() {
		HashMap<String, Double> VP_vector = new HashMap<String, Double>();
		int maxDocs = luceneReader.maxDoc();
		try {
			for (int i = 0; i < maxDocs; i++) {
				TermFreqVector TFV = luceneReader.getTermFreqVector(i, Field);
				String[] terms = TFV.getTerms();
				int[] freqs = TFV.getTermFrequencies();
				// Document length = sum of raw term frequencies (for TF normalization).
				double docLength = 0;
				for (int f : freqs)
					docLength += f;
				for (int j = 0; j < terms.length; j++) {
					double oldTFIDF = VP_vector.containsKey(terms[j]) ? VP_vector.get(terms[j]) : 0.0;
					double tf = freqs[j] / docLength;
					VP_vector.put(terms[j], oldTFIDF + tf * getIDF(terms[j], maxDocs));
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return VP_vector;
	}

	/**
	 * Euclidean (L2) magnitude of a sparse vector.
	 *
	 * @param V term -&gt; weight map
	 * @return sqrt of the sum of squared weights
	 */
	public Double getMag(HashMap<String, Double> V) {
		double mag = 0.0;
		for (double v : V.values())
			mag += v * v;
		return Math.sqrt(mag);
	}

	/**
	 * Cosine similarity of two sparse vectors: dot product over the product of
	 * the supplied magnitudes. NOTE(review): despite the name this returns the
	 * cosine value, not an angle; magnitudes of 0 yield Infinity/NaN — callers
	 * are expected to pass precomputed non-zero magnitudes.
	 * Iterates over the smaller map for speed.
	 */
	public Double getAngularDistance(HashMap<String, Double> m1, double mag1, HashMap<String, Double> m2, double mag2) {
		if (m1.size() > m2.size()) {
			HashMap<String, Double> swap = m1;
			m1 = m2;
			m2 = swap;
		}
		double accum = 0d;
		for (Entry<String, Double> e : m1.entrySet()) {
			Double other = m2.get(e.getKey());
			if (other != null)
				accum += e.getValue() * other;
		}
		return accum / (mag1 * mag2);
	}

	/**
	 * Loads the distance table from {@link #filename} into
	 * {@link #distanceArray} and sorts it by distance. First pass counts lines
	 * to size the array; second pass parses "docID\tdistance\tmagnitude".
	 */
	public void loadDistanceFromFile() {
		BufferedReader fr = null;
		try {
			fr = new BufferedReader(new FileReader(filename));
			int count = 0;
			while (fr.readLine() != null)
				count++;
			fr.close();

			distanceArray = new DistanceResult[count];
			fr = new BufferedReader(new FileReader(filename));
			String text;
			count = 0;
			while ((text = fr.readLine()) != null) {
				String[] tokens = text.split("\t");
				distanceArray[count] = new DistanceResult(Integer.parseInt(tokens[0]),
						Double.parseDouble(tokens[1]), Double.parseDouble(tokens[2]));
				count++;
			}
			Arrays.sort(distanceArray);
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// FIX: the second reader was never closed in the original (leak).
			if (fr != null) {
				try {
					fr.close();
				} catch (IOException ignored) {
					// best-effort close; nothing sensible to do here
				}
			}
		}
	}

	/**
	 * Builds the length-normalized TF-IDF vector of a single document from its
	 * stored term-frequency vector.
	 *
	 * @param id Lucene internal document id
	 * @return term -&gt; TF*IDF map; exits the JVM on I/O failure (original behavior)
	 */
	public HashMap<String, Double> getVectorFromLucene(int id) {
		HashMap<String, Double> resultVector = new HashMap<String, Double>();
		try {
			TermFreqVector TFV = luceneReader.getTermFreqVector(id, Field);
			String[] terms = TFV.getTerms();
			int[] freqs = TFV.getTermFrequencies();
			double docLength = 0;
			for (int f : freqs)
				docLength += f;
			for (int j = 0; j < terms.length; j++) {
				double oldTFIDF = resultVector.containsKey(terms[j]) ? resultVector.get(terms[j]) : 0.0;
				double tf = freqs[j] / docLength;
				resultVector.put(terms[j], oldTFIDF + tf * getIDF(terms[j], luceneReader.maxDoc()));
			}
		} catch (IOException e) {
			e.printStackTrace();
			System.exit(0);
		}
		return resultVector;
	}

	/**
	 * Writes one line per document — "docID\tdistance-to-centroid\tmagnitude" —
	 * to {@link #filename}, for later loading by {@link #loadDistanceFromFile()}.
	 */
	public void constructDistanceTable() {
		HashMap<String, Double> VP = getCorpusCenteroid();
		// Hoisted: the centroid magnitude is loop-invariant (was recomputed per document).
		double vpMag = getMag(VP);
		FileWriter os = null;
		try {
			// FIX: use the filename field instead of a duplicated string literal.
			os = new FileWriter(new File(filename));
			for (int i = 0; i < luceneReader.maxDoc(); i++) {
				HashMap<String, Double> doc_vector = getVectorFromLucene(i);
				double docMag = getMag(doc_vector);
				double dist = getAngularDistance(doc_vector, docMag, VP, vpMag);
				os.append(i + "\t" + dist + "\t" + docMag + "\n");
			}
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// FIX: the writer was never closed or flushed in the original,
			// so the distance file could end up empty or truncated.
			if (os != null) {
				try {
					os.close();
				} catch (IOException ignored) {
					// best-effort close
				}
			}
		}
	}

	/**
	 * Tiny manual sanity check for {@link #getAngularDistance}. Passing 0 for
	 * both magnitudes divides by zero and prints Infinity (original behavior).
	 */
	public void test() {
		HashMap<String, Double> V1 = new HashMap<String, Double>();
		HashMap<String, Double> V2 = new HashMap<String, Double>();
		V1.put("Mahmoud", 1.0);
		V1.put("Elborawi", 4.0);
		V2.put("Mahmoud", 3.0);
		V2.put("Elborawi", 1.0);
		System.out.println(getAngularDistance(V1, 0, V2, 0));
	}

	/**
	 * Vantage-point k-NN search (k = 20). Walks the distance-sorted table
	 * outwards from the query's own distance to the centroid, keeping the best
	 * candidates in a bounded max-heap, and stops once the heap is full and its
	 * worst entry is no farther than the gap to the next unexplored candidate.
	 *
	 * @param queryDistance the query's precomputed distance to the vantage point
	 * @param iQuery        the query's TF-IDF vector
	 * @return a max-heap of up to 20 candidate {@code DistanceResult}s
	 */
	public MaxHeap find_top(double queryDistance, HashMap<String, Double> iQuery) {
		// docID/magnitude of the probe are dummies; only the distance is compared.
		DistanceResult target = new DistanceResult(10, queryDistance, 10);
		double iQueryMag = getMag(iQuery);
		MaxHeap<DistanceResult> mHeap = new MaxHeap<DistanceResult>(20);

		// Idempotent safety sort; loadDistanceFromFile() already sorts the table.
		Arrays.sort(distanceArray);
		int i = Math.abs(Arrays.binarySearch(distanceArray, target));
		// NOTE(review): on a miss, binarySearch returns -(insertionPoint)-1, so
		// abs() yields insertionPoint+1 and the bounds below straddle the
		// insertion point; on an exact hit both bounds sit one slot below the
		// hit. Preserved from the original — confirm the hit case is intended.
		int lowerBound = i - 2;
		int upperBound = i - 1;
		boolean validLowerBound = lowerBound >= 0 && lowerBound <= distanceArray.length - 1;
		boolean validUpperBound = upperBound >= 0 && upperBound <= distanceArray.length - 1;

		int count = 0;
		while (validUpperBound || validLowerBound) {
			// Pick whichever neighbour (lower/upper) is closer to the query's
			// centroid distance; if only one side is still valid, take it.
			boolean choose_upper = true;
			double low_gap = Double.MAX_VALUE;
			double upper_gap = Double.MAX_VALUE;
			if (validLowerBound && !validUpperBound) {
				choose_upper = false;
			} else if (validUpperBound && !validLowerBound) {
				// choose_upper stays true
			} else {
				low_gap = Math.abs(distanceArray[lowerBound].distance - queryDistance);
				upper_gap = Math.abs(distanceArray[upperBound].distance - queryDistance);
				if (low_gap < upper_gap)
					choose_upper = false;
			}

			// Consume the chosen index and advance that side (or invalidate it
			// once the end of the table is reached).
			int candidate;
			double max_gap;
			if (!choose_upper) {
				candidate = lowerBound;
				max_gap = low_gap;
				if (lowerBound > 0)
					lowerBound--;
				else
					validLowerBound = false;
			} else {
				candidate = upperBound;
				max_gap = upper_gap;
				if (upperBound < distanceArray.length - 1)
					upperBound++;
				else
					validUpperBound = false;
			}

			// Compute the candidate's actual distance to the QUERY.
			HashMap<String, Double> candidateVector = getVectorFromLucene(distanceArray[candidate].docID);
			double canMag = distanceArray[candidate].docVectorMag;
			double canDistance = getAngularDistance(iQuery, iQueryMag, candidateVector, canMag);
			DistanceResult nearCandidate = new DistanceResult(distanceArray[candidate].docID, canDistance, canMag);
			// FIX: the original added distanceArray[candidate] (distance to the
			// CENTROID) and left nearCandidate unused; the heap must rank by
			// the candidate's distance to the query for the pruning test below
			// to be meaningful.
			mHeap.add(nearCandidate);
			DistanceResult maxInHeap = (DistanceResult) mHeap.max();
			count++;
			// VP pruning: stop once the heap is full and its worst entry is at
			// least as good as the gap to the next unexplored candidate.
			if (maxInHeap.distance <= max_gap && mHeap.isFull())
				break;
		}
		System.out.println(count + " Iteration of a maximum of: " + distanceArray.length);
		return mHeap;
	}

	/**
	 * Parses the OHSUMED query file, turns each query into a TF-IDF vector
	 * (analyzer term counts weighted by the cached IDF), and runs the
	 * vantage-point search for each query against the distance table.
	 */
	public void search() {
		oqp = new OhsumedQueryParser("/media/Data/dDocuments/Code/ohsumed/misc/queries.txt",
				"/media/Data/dDocuments/Code/ohsumed/misc/drel.i");
		TreeMap<Integer, QueryPair> queries = oqp.parseQueryFile();
		HashMap<String, Double> iQuery = new HashMap<String, Double>();
		HashMap<String, Double> centoid = getCorpusCenteroid();
		double ceMag = getMag(centoid);
		while (!queries.isEmpty()) {
			iQuery.clear();
			Entry<Integer, QueryPair> M = queries.pollFirstEntry();
			QueryPair QP = M.getValue();
			String newQuery = QP.Query;
			System.out.println(QP.SN + "\t" + QP.Query + " :");
			// Tokenize/stem the query and count raw term frequencies.
			TokenStream stream = analyzer.tokenStream(Field, new StringReader(newQuery));
			TermAttribute term = stream.addAttribute(TermAttribute.class);
			try {
				while (stream.incrementToken()) {
					String myterm = term.term();
					double value = iQuery.containsKey(myterm) ? iQuery.get(myterm) + 1.0 : 1.0;
					iQuery.put(myterm, value);
				}
				// FIX: release analyzer resources (stream was never closed).
				stream.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
			// Weight counts by IDF; terms absent from the cache keep weight * 1.
			for (Entry<String, Double> e : iQuery.entrySet()) {
				double idf = docIDFCache.containsKey(e.getKey()) ? docIDFCache.get(e.getKey()) : 1;
				e.setValue(e.getValue() * idf);
			}
			double queryMag = getMag(iQuery);
			double queryDistance = getAngularDistance(centoid, ceMag, iQuery, queryMag);
			MaxHeap resultHeap = find_top(queryDistance, iQuery);
			System.out.println(resultHeap.size());
		}
	}

	/**
	 * Loads (or, if uncommented, rebuilds) the distance table and times a full
	 * query run over the OHSUMED query set.
	 */
	public static void main(String[] argc) {
		VantagePointSearcher VPS = new VantagePointSearcher(Searcher.OhsumedIndexLocation);
		VPS.openSearcher();
		long start = System.currentTimeMillis();
		// VPS.constructDistanceTable(); // one-time build of the distance file
		VPS.loadDistanceFromFile();
		System.out.println("loading finished");
		VPS.search();

		long end = System.currentTimeMillis();
		System.out.println("Took: " + (end - start) / 1000.0 + " Seconds.");
	}

}
