package mahmoud.sigir.test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.TreeMap;
import java.util.Map.Entry;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

import mahmoud.utils.DateUtils;
import mahmoud.utils.MinHeap;
import mahmoud.utils.PorterStemAnalyzer;
import mahmoud.utils.QueryPair;
import mahmoud.utils.Result;
import mahmoud.utils.WieghtedTerm;
import mahmoud.sigir.search.QueryGen;

/**
 * Test driver for the OHSUMED collection: parses the OHSUMED query and
 * relevance-judgement files, generates/saves "dense" (expanded, weighted)
 * queries, and compares two search implementations against each other,
 * writing per-query overlap counts to a CSV under results/ohsumed/.
 */
public class ohsumedTester 
{
	/** Analyzer used to tokenize/stem raw query text before searching. */
	public Analyzer analyzer = null;
	/** Parsed queries keyed by query number (populated by {@link #parseQueryFile}). */
	public  TreeMap<Integer, QueryPair> StoredQueries;
	/** Relevance judgements: query id -> set of relevant doc ids ({@link #parseEvalFile}). */
	public  TreeMap<Integer, HashSet<String> > QueryRelevantDocuments;
	private QueryGen QG;
	/** Index field every term is searched against. */
	private String Field = "All";
	/** Default location of the pre-computed dense-query CSV. */
	public static final String ohsumedDenseQuery = "results/ohsumed/ohsumed_denseQuer.csv";

	public ohsumedTester() 
	{
		StoredQueries = new TreeMap<Integer, QueryPair> ();
		QueryRelevantDocuments = new TreeMap<Integer, HashSet<String> > ();
		// NOTE(review): hard-coded Windows index path (a Linux variant used to be
		// "/media/Data/dDocuments/Code/builtIndexes/OHSUMED") — consider making
		// this a constructor parameter.
		QG = new QueryGen("D:/dDocuments/Code/builtIndexes/OHSUMED");
		analyzer = new PorterStemAnalyzer();
	}

	/**
	 * Parses an OHSUMED-format query file into {@link #StoredQueries}.
	 * Record layout: a ".I &lt;id&gt;" line starts a record; the line following
	 * ".B" is the patient info; the line following ".W" is the query text.
	 *
	 * @param pFile path to the query file
	 * @return the populated {@link #StoredQueries} map, or null on I/O failure
	 */
	public  TreeMap<Integer, QueryPair> parseQueryFile(String pFile) 
	{
		String sn = null, patientinfo = null, query = null;
		BufferedReader reader = null;
		try 
		{
			reader = new BufferedReader(new FileReader(pFile));
			String line, preline = null;
			// readLine() strips the newline, returns "" for blank lines and
			// null only at end of stream.
			while ((line = reader.readLine()) != null)
			{
				if (line.contains(".I"))
				{
					// A new ".I" record begins: flush the previous record if it
					// is complete. (Fixed: was bitwise `&` on booleans.)
					if (query != null && patientinfo != null)
						this.StoredQueries.put(Integer.parseInt(sn), new QueryPair(sn, query, patientinfo));
					sn = line.replace(".I", "").trim();
				}
				if (preline != null && preline.contains(".B"))
					patientinfo = line;
				if (preline != null && preline.contains(".W"))
					query = line;
				preline = line;
			}
			// Flush the trailing record. Guard against an empty / ".I"-less file,
			// where sn is still null and Integer.parseInt(null) would throw NPE.
			if (sn != null)
				this.StoredQueries.put(Integer.parseInt(sn), new QueryPair(sn, query, patientinfo));
			return this.StoredQueries;
		}
		catch (IOException ex)
		{
			ex.printStackTrace();
			return null;
		}
		finally
		{
			// Fixed: reader previously leaked when an exception fired before close().
			if (reader != null)
				try { reader.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}

	/**
	 * Parses a tab-separated relevance file (query-id TAB doc-id per line)
	 * into {@link #QueryRelevantDocuments}.
	 *
	 * @param pFile path to the judgement file
	 * @return the populated map, or null on I/O failure
	 */
	public  TreeMap<Integer, HashSet<String> > parseEvalFile(String pFile) 
	{
		BufferedReader reader = null;
		try 
		{
			reader = new BufferedReader(new FileReader(pFile));
			String line;
			while ((line = reader.readLine()) != null)
			{
				String[] x = line.split("\t");
				if (x.length < 2)
					continue; // skip blank/malformed lines instead of throwing AIOOBE
				int iqid = Integer.parseInt(x[0]);
				HashSet<String> docs = QueryRelevantDocuments.get(iqid);
				if (docs == null)
				{
					docs = new HashSet<String>();
					QueryRelevantDocuments.put(iqid, docs);
				}
				docs.add(x[1]);
			}
			return QueryRelevantDocuments;
		}
		catch (IOException ex)
		{
			ex.printStackTrace();
			return null;
		}
		finally
		{
			if (reader != null)
				try { reader.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}

	/**
	 * Loads pre-computed dense queries from savedFile, runs each through both
	 * search paths, and writes the per-query overlap count and elapsed time to
	 * a timestamped CSV under results/ohsumed/.
	 *
	 * @param savedFile dense-query CSV produced by {@link #generateDenseQueryandSave}
	 */
	public void conductTestFromSavedDenseQ(String savedFile)
	{
		TreeMap<Integer, ArrayList<WieghtedTerm>> denseQueries = retrieveDenseQueries(savedFile);
		QG.GTS.openSearcher();
		BufferedWriter out = null;
		try 
		{
			out = new BufferedWriter(new FileWriter("results/ohsumed/" + DateUtils.now() + "_new_ohsumed_results.csv", false));
			out.append("Query Number, Basic Search Matches, Search Time, Query Construction Time, Dense Search Matches, Time, Query Size\n");

			while (!denseQueries.isEmpty())
			{
				Entry<Integer, ArrayList<WieghtedTerm>> e = denseQueries.pollFirstEntry();

				long start = System.currentTimeMillis();
				System.out.println("Query Number: " + e.getKey());
				// Run the identical query through both search implementations...
				MinHeap<Result> r1 = QG.GTS.luceneSearch(e.getValue());
				MinHeap<Result> r2 = QG.GTS.searchUsingLuceneReader(e.getValue());
				System.out.println(r1);
				System.out.println(r2);
				// ...and count the documents the two result sets share.
				int count = matchHeaps(r1, r2);
				long end = System.currentTimeMillis();
				System.out.println("Total Matches for query: " + count);
				// Fixed: removed stray space after "\n" that corrupted the CSV rows.
				out.append(e.getKey() + ", " + count + ", " + ((end - start) / 1000.0) + "\n");
			}

			QG.GTS.closeSearcher();
		} 
		catch (IOException e1) 
		{
			e1.printStackTrace();
			// Fixed message (was "could save results to file") and exit status
			// (was 0, which signalled success on failure).
			System.out.println("could not save results to file");
			System.exit(1);
		}
		finally
		{
			// Fixed: the writer was never closed, so buffered results could be lost.
			if (out != null)
				try { out.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}

	/**
	 * Counts how many results in r1 are judged relevant for query id.
	 * Drains (empties) r1 as a side effect.
	 *
	 * @param id query id to look up in {@link #QueryRelevantDocuments}
	 * @param r1 result heap to validate (consumed)
	 * @return number of results whose realDocId is judged relevant
	 */
	private int validateResults(int id, MinHeap<Result> r1)
	{
		int total = 0;
		while (!r1.isEmpty())
		{
			Result r = (Result) r1.poll();
			if (QueryRelevantDocuments.containsKey(id))
				if (QueryRelevantDocuments.get(id).contains(r.realDocId))
					total++;
		}
		System.out.println("Total Matches: " + total);
		return total;
	}

	/**
	 * Reads a saved dense-query CSV where each non-blank line is a sequence of
	 * "term:weight" pairs separated by commas. Queries are numbered 1..n in
	 * file order (the file carries no explicit ids).
	 *
	 * @param filePath path to the dense-query CSV
	 * @return map of 1-based query number to its weighted-term list; partial
	 *         (possibly empty) on parse/I/O failure
	 */
	public TreeMap<Integer, ArrayList<WieghtedTerm>> retrieveDenseQueries(String filePath)
	{
		TreeMap<Integer, ArrayList<WieghtedTerm>> result = new TreeMap<Integer, ArrayList<WieghtedTerm>>();
		BufferedReader reader = null;
		int i = 1;
		try
		{
			reader = new BufferedReader(new FileReader(filePath));
			String line;
			while ((line = reader.readLine()) != null)
			{
				if (line.trim().length() == 0)
					continue;
				ArrayList<WieghtedTerm> al = new ArrayList<WieghtedTerm>();
				for (String unit : line.split(","))
				{
					unit = unit.trim();
					if (unit.length() == 0)
						continue; // tolerate the trailing comma each line ends with
					String[] values = unit.split(":");
					al.add(new WieghtedTerm(values[0], Double.parseDouble(values[1])));
				}
				result.put(i++, al);
			}
		}
		catch (Exception e)
		{
			e.printStackTrace();
			System.out.println("failed while retrieving denseQueries");
		}
		finally
		{
			// Fixed: reader was never closed (leaked on every call).
			if (reader != null)
				try { reader.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
		return result;
	}

	/**
	 * Parses the query file, expands each sparse query into a dense weighted
	 * query via {@link QueryGen#constructDenseQueryfromTD}, and saves all dense
	 * queries (one per line, "term:weight," units) to {@link #ohsumedDenseQuery}.
	 *
	 * @param qfile path to the OHSUMED query file
	 */
	public void generateDenseQueryandSave(String qfile)
	{
		TreeMap<Integer, QueryPair> x = parseQueryFile(qfile);
		BufferedWriter queryFile = null;
		try 
		{
			// Fixed: use the ohsumedDenseQuery constant instead of duplicating the path.
			queryFile = new BufferedWriter(new FileWriter(ohsumedDenseQuery, false));
			System.out.println("generating dense queries");
			ArrayList<WieghtedTerm> sparseQuery = new ArrayList<WieghtedTerm>();
			long start = System.currentTimeMillis();
			while (!x.isEmpty())
			{
				Entry<Integer, QueryPair> M = x.pollFirstEntry();
				QueryPair QP = M.getValue();
				System.out.println(QP.SN + "\t" + QP.Query);
				sparseQuery.clear();
				// Tokenize/stem the raw query text into unit-weight terms.
				TokenStream stream = analyzer.tokenStream(Field, new StringReader(QP.Query));
				TermAttribute term = stream.addAttribute(TermAttribute.class);
				while (stream.incrementToken()) 
				{
					sparseQuery.add(new WieghtedTerm(term.term(), 1));
				}
				// NOTE(review): openSearcher() is invoked once per query; if it is
				// idempotent it could be hoisted above the loop — confirm first.
				QG.GTS.openSearcher();
				MinHeap<Result> r1 = QG.GTS.luceneSearch(sparseQuery);
				ArrayList<WieghtedTerm> denseQuery = QG.constructDenseQueryfromTD(r1, sparseQuery, QueryGen.OhsumedFile, 0);
				System.out.println("dense query size: " + denseQuery.size());
				for (WieghtedTerm wt : denseQuery)
				{
					queryFile.append(wt.Term + ":" + wt.wieght + ",");
				}
				queryFile.append("\n");
			}
			long end = System.currentTimeMillis();
			System.out.println("saving dense queries Took: " + (end - start) / 1000.0 + " Seconds.");
		} 
		catch (IOException e1) 
		{
			e1.printStackTrace();
			System.out.println("failed while trying to generate and save dense queries to file");
		}
		finally
		{
			// Fixed: close even when an exception fired mid-loop (was leaked).
			if (queryFile != null)
				try { queryFile.close(); } catch (IOException ignored) { /* best-effort close */ }
		}
	}

	/**
	 * Counts how many document ids appear in both result heaps.
	 * Both heaps are drained (emptied) as a side effect.
	 *
	 * @param a first result heap (consumed)
	 * @param b second result heap (consumed)
	 * @return number of realDocId values present in both a and b
	 */
	public int matchHeaps(MinHeap<Result> a, MinHeap<Result> b)
	{
		HashSet<String> seen = new HashSet<String>();
		while (!a.isEmpty())
		{
			Result r = (Result) a.poll();
			seen.add(r.realDocId);
		}
		int matches = 0;
		while (!b.isEmpty())
		{
			Result rb = (Result) b.poll();
			if (seen.contains(rb.realDocId))
				matches++;
		}
		return matches;
	}

	/** Entry point: replays the saved dense queries against both search paths. */
	public static void main(String[] args) 
	{
		ohsumedTester ot = new ohsumedTester();
		try 
		{
			ot.conductTestFromSavedDenseQ(ohsumedDenseQuery);
		} 
		catch (Exception e) 
		{
			e.printStackTrace();
		}
		System.out.println("ohsumed testing");
	}

}
