package reaction.europarl;

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser.Operator;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.Version;

import reaction.europarl.classify.SVMLight;
import reaction.europarl.queries.QueryEL;
import reaction.europarl.queries.candidates.Candidate;
import reaction.europarl.ranking.SVMRank;
import reaction.util.misc.BigFile;

public class Main {
		
	/**
	 * Entry point. Parses the command line and dispatches to either the full
	 * train/test pipeline ({@code -run}) or the recall-tuning run
	 * ({@code -recall}). With no arguments, prints usage and exits.
	 *
	 * @param args command-line arguments (see option definitions below)
	 * @throws Exception propagated from parsing or from the selected pipeline
	 */
	public static void main(String[] args) throws Exception {

		// create Options object
		Options options = new Options();

		// boolean mode flags
		options.addOption("run", false, "train a ranking model");
		options.addOption("recall", false, "to tune the recall");

		// options that take a file-path argument
		Option queriesTrain = OptionBuilder.withArgName("queriesTrain").hasArg().withDescription("XML file containing queries for training").create( "queriesTrain" );
		Option queriesTest = OptionBuilder.withArgName("queriesTest").hasArg().withDescription("XML file containing queries for testing").create( "queriesTest" );

		options.addOption(queriesTrain);
		options.addOption(queriesTest);

		// No arguments: show the help text and exit before attempting to
		// parse (parsing an empty line is pointless, and this keeps the help
		// reachable even if parsing rules change).
		if (args.length == 0) {
			HelpFormatter formatter = new HelpFormatter();
			formatter.printHelp(" ", options );
			System.exit(0);
		}

		CommandLineParser parser = new GnuParser();
		CommandLine line = parser.parse( options, args );

		if (line.hasOption("run")) completeRun(line);
		else if (line.hasOption("recall")) recall(line);
	}
	
	/**
	 * Runs the full entity-linking pipeline:
	 * <ol>
	 *   <li>loads and candidate-retrieves the training and test queries;</li>
	 *   <li>trains an SVMRank model on the training queries;</li>
	 *   <li>trains an SVM NIL detector on the ranking scores;</li>
	 *   <li>applies both models to the test queries and writes per-query
	 *       answers to a results file;</li>
	 *   <li>prints recall and accuracy statistics.</li>
	 * </ol>
	 * Expects the {@code queriesTrain} and {@code queriesTest} options to be
	 * present on the command line.
	 *
	 * @param line parsed command line carrying the query-file paths
	 * @throws Exception propagated from file I/O, model training, or testing
	 */
	public static void completeRun(CommandLine line) throws Exception  {
		
		Definitions.init();
		
		// Load and candidate-retrieve the training queries.
		System.out.println("Processing trainning queries");		
		loadQueries(line.getOptionValue("queriesTrain"),Ranking.Trainqueries);
		processQueries(Ranking.Trainqueries);
		
		// Load and candidate-retrieve the test queries.
		System.out.println("\nProcessing test queries");		
		loadQueries(line.getOptionValue("queriesTest"),Ranking.Testqueries);
		processQueries(Ranking.Testqueries);
		
		// Train the SVMRank candidate-ranking model on the training queries.
		System.out.println("\nExtracting features from Train Queries");
		Ranking.extractFeatures(Ranking.Trainqueries,true);
		SVMRank.svmRankFormat(Ranking.Trainqueries, "svmrank-train.dat");
		SVMRank.train();
		
		// Train SVM model to perform NIL detection using the ranking scores as features
		SVMRank.generateRankingScores();
		System.out.println("\nExtracting features for NIL detector");		
		SVMLight.train(Ranking.Trainqueries,"NIL_train.dat");
		
		// Apply the SVMRank model to the test queries
		System.out.println("\nExtracting features from Test Queries");
		Ranking.extractFeatures(Ranking.Testqueries,false);
		SVMRank.svmRankFormat(Ranking.Testqueries, "svmrank-test.dat");
		SVMRank.test();

		
		// Gather ranking results from output and added to Ranking.TestQueries
		List<QueryEL> queriesCandidatesRanked = SVMRank.gatherResults("svmrank-predictions","svmrank-test.dat");
		
		// Copy the ranked candidate lists back onto the in-memory test
		// queries. NOTE(review): alignment is positional — assumes
		// gatherResults returns queries in the same order as Testqueries;
		// a mismatched id is silently skipped.
		for (int i = 0; i < Ranking.Testqueries.size(); i++) {			
			// check if queries are the same before getting top-ranked candidate
			if  (queriesCandidatesRanked.get(i).query_id.equalsIgnoreCase(Ranking.Testqueries.get(i).query_id)) {
				Ranking.Testqueries.get(i).candidatesRanked = new ArrayList<Candidate>(queriesCandidatesRanked.get(i).candidatesRanked);
			}
		}
		
		// Apply the SVM model to detect whether top-ranked entity is NIL
		SVMLight.classify(Ranking.Testqueries,"NIL_test.dat");
		
		//gather results: read "NIL_predictions" line per line
		// (one score per line, indexed by position to match NIL_test.dat)
		HashMap<Integer, Double> NIL_predictions = new HashMap<Integer, Double>();		
		BigFile predictionsFile = new BigFile("NIL_predictions");
		int i=0;
		
		for (String prediction : predictionsFile) {
			NIL_predictions.put(i, Double.parseDouble(prediction));
			i++;
		}
		
		//read NIL_test.dat line by line: get last #EL11051 E0081133
		BigFile queries = new BigFile("NIL_test.dat");
		i=0;
		
		List<QueryEL> queries_answers = new LinkedList<QueryEL>();
		String output = ("results-SVMRank-NIL-Detector-no-NIL-clustering.txt");		
		PrintStream out = new PrintStream( new FileOutputStream(output));

		// Each line carries a trailing "#<query_id> <answer>" fragment; pair
		// it with the NIL score at the same index and write either the
		// top-ranked answer or "NIL" for the query.
		for (String query: queries) {
			
			String query_id = query.split("\\#")[1].split("\\s")[0];
			String answer = query.split("\\#")[1].split("\\s")[1];
			out.print(query_id);
			
			QueryEL q = new QueryEL();
			q.query_id = query_id;			
			
			
			// A score of at least 1 is treated as a NIL classification.
			if (NIL_predictions.get(i)>=1) {
				out.println("\tNIL");
				q.answer = "NIL";
			}
			else {				
				out.println("\t"+answer);
				q.answer = answer;
			}
			i++;
			queries_answers.add(q);
		}		
		out.close();
				
		// Final evaluation: candidate-retrieval recall plus end-to-end accuracy.
		recallStatistics(line,Ranking.Testqueries);		
		SVMRank.accuracy(queries_answers);	
	}

	
	/**
	 * Reads tab-separated queries from {@code queries_file}, one query per
	 * line with six fields, and appends a {@link QueryEL} per line to
	 * {@code queries}.
	 *
	 * Best-effort contract preserved from the original: any error (missing
	 * file, short line, ...) is reported to stderr and the method returns
	 * normally with whatever was loaded so far.
	 *
	 * @param queries_file path to the tab-separated query file
	 * @param queries destination list; loaded queries are appended to it
	 */
	public static void loadQueries(String queries_file, List<QueryEL> queries) {
		
		System.out.println("Loading queries from: " + queries_file);
		
		// try-with-resources guarantees the reader is closed even when an
		// exception is thrown (the original leaked the stream on the error
		// path, and the DataInputStream wrapper added nothing).
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(queries_file)))) {
			
			String strLine;
			while ((strLine = br.readLine()) != null) {
				// Expect exactly six tab-separated fields per line; a short
				// line throws and is handled by the catch below.
				String[] data = strLine.split("\t");
				QueryEL q = new QueryEL(data[0], data[1], data[2], data[3], data[4], data[5]);
				queries.add(q);
			}
			
			System.out.println(queries.size() + " queries loaded");
		}
		catch (Exception e) {
			//Catch exception if any
			System.err.println("Error: " + e.getMessage());
		}
	}
	
	/**
	 * Recall-only run: loads the test queries, retrieves candidate documents
	 * for each one, and prints the recall statistics — without training or
	 * applying any ranking model.
	 *
	 * @param line parsed command line carrying the {@code queriesTest} path
	 */
	public static void recall(CommandLine line) throws CorruptIndexException, IOException, ParseException {

		Definitions.init();

		String testQueriesFile = line.getOptionValue("queriesTest");
		loadQueries(testQueriesFile, Ranking.Testqueries);
		processQueries(Ranking.Testqueries);
		recallStatistics(line, Ranking.Testqueries);
	}

	/**
	 * Prints candidate-retrieval statistics for the given queries: total
	 * documents retrieved, NIL-query count, queries with no candidates, and
	 * miss-rate / coverage over the non-NIL queries (a query is "found" when
	 * its gold URL appears among its retrieved candidates).
	 *
	 * @param line parsed command line (unused here; kept for interface compatibility)
	 * @param queries queries whose candidate lists have already been populated
	 */
	public static void recallStatistics(CommandLine line, List<QueryEL> queries) throws CorruptIndexException, IOException, ParseException {
		
		int answer_found = 0;      // non-NIL queries whose gold answer is among the candidates
		int answer_not_found = 0;  // non-NIL queries whose gold answer was missed
		int NIL_queries = 0;       // queries whose gold answer is NIL (and that have candidates)
		int total_n_docs = 0;      // candidates retrieved over all non-NIL queries
		int queries_zero_docs = 0; // queries with no candidates at all
		
		for (QueryEL q : queries) {
			
			if (q.candidates.size() == 0) {
				queries_zero_docs += 1;
				continue;
			}
			
			// As in the original, NIL queries' candidates are not counted
			// towards total_n_docs.
			if (q.URL_answer.equalsIgnoreCase("NIL")) {
				NIL_queries += 1;
				continue;
			}
			
			boolean found = false;
			for (Candidate c : q.candidates) {
				total_n_docs += 1;
				if (("http://pt.wikipedia.org/wiki/"+c.URL).equalsIgnoreCase(q.URL_answer))
					found = true;
			}
			
			// BUG FIX: count each query at most once. The original
			// incremented answer_found once per MATCHING CANDIDATE, so a
			// query with duplicate matching candidates inflated coverage
			// (potentially above 100%).
			if (found) answer_found += 1;
			else answer_not_found += 1;
		}
		
		// Rates are computed over the non-NIL query population.
		float miss_rate = (float) answer_not_found / ((float) (queries.size()-NIL_queries));
		float coverage = (float) answer_found / ((float) (queries.size()-NIL_queries));
		float no_candidates = (float) queries_zero_docs / ((float) (queries.size()-NIL_queries));
		
		System.out.println("Documents Retrieved: " + total_n_docs);
		System.out.println("Total Queries: " + Integer.toString(queries.size()));
		System.out.println("Queries NIL: " + NIL_queries);		
		System.out.println("Docs p/ query: " + ( (float) total_n_docs / (float) queries.size()));
		System.out.println("Queries with 0 docs retrieved: " + Integer.toString(queries_zero_docs) + " (" + no_candidates * 100 + "%)" );
		System.out.println("Queries Not Found (Miss Rate): " + answer_not_found + " (" + miss_rate * 100 + "%)" );
		System.out.println("Queries Found (Coverage): " + answer_found + " (" + coverage * 100 + "%)" );
		
	}
	
	/**
	 * Prints end-to-end accuracy: overall, over non-NIL (PER) queries, and
	 * over queries whose best candidate is absent (treated as NIL-correct).
	 *
	 * NOTE(review): number_NIL counts every query that HAS a best candidate
	 * (including PER queries) — kept as-is to preserve the reported figures;
	 * verify this is the intended denominator for the "NIL" line.
	 *
	 * @param queries queries with ranked candidates and gold answers attached
	 */
	public static void accuracyStatistics(List<QueryEL> queries) {
		
		int NIL_correct = 0;
		int PER_correct = 0;
		int number_NIL = 0;
		int number_PER = 0;
		
		for (QueryEL q : queries) {
			
			Candidate best = q.bestCandidate();
			
			if (best!=null) {
				// BUG FIX: the original used reference comparison
				// (q.URL_answer != "NIL"), which is true for any "NIL"
				// string not interned to the same object. Use a content
				// comparison, case-insensitive for consistency with the
				// rest of this class.
				if (!q.URL_answer.equalsIgnoreCase("NIL")) {
					number_PER += 1;
					if (q.URL_answer.equalsIgnoreCase("http://pt.wikipedia.org/wiki/"+best.URL))
						PER_correct += 1;
				}
				number_NIL += 1;
			}
			else NIL_correct += 1;			
		}
		
		System.out.println("Accuracy:");
		System.out.println("ALL:" + (float) (PER_correct+NIL_correct)/ (float) (number_PER+number_NIL));
		System.out.println("PER:" + (float) PER_correct/ (float) number_PER);
		System.out.println("NIL:" + (float) NIL_correct/ (float) number_NIL);
		
	}
	
	/**
	 * For every query: fetch its support document, run the Lucene candidate
	 * retrieval step, and log how many candidates were found.
	 *
	 * @param queries queries to populate with candidates (modified in place)
	 */
	public static void processQueries(List<QueryEL> queries) throws ParseException, IOException {

		for (QueryEL query : queries) {
			query.getSupportDocument();
			queryLucene(query);
			int nCandidates = query.candidates.size();
			System.out.println(query.query_string + "\t" + nCandidates);
		}
	}
	
	/** Default number of Wikipedia documents retrieved per query. */
	private static final int DEFAULT_MAX_HITS = 50;

	/**
	 * Retrieves candidate Wikipedia documents for the query using the default
	 * hit limit and appends them to {@code query.candidates}.
	 */
	public static void queryLucene(QueryEL query) throws ParseException, IOException {
		queryLucene(query, DEFAULT_MAX_HITS);
	}

	/**
	 * Retrieves up to {@code maxHits} candidate Wikipedia documents matching
	 * the query string over the entity, plaintitle, and wiki_text fields, and
	 * appends one {@link Candidate} per hit (with its Lucene score) to
	 * {@code query.candidates}.
	 *
	 * @param query query whose {@code query_string} is searched; candidates are appended in place
	 * @param maxHits maximum number of documents to retrieve (was hard-coded to 50)
	 * @throws ParseException if the query string cannot be parsed
	 * @throws IOException on index access failure
	 */
	public static void queryLucene(QueryEL query, int maxHits) throws ParseException, IOException {
		
		// KeywordAnalyzer: terms are matched as-is, without tokenization.
		KeywordAnalyzer analyzer = new KeywordAnalyzer();		
		MultiFieldQueryParser queryParser = new MultiFieldQueryParser(Version.LUCENE_35, new String[] {"entity", "plaintitle","wiki_text"},analyzer);
		
		// OR semantics: a document matching any field/term is a candidate.
		Operator op = org.apache.lucene.queryParser.QueryParser.Operator.OR;
		queryParser.setDefaultOperator(op);
		org.apache.lucene.search.Query q = queryParser.parse(query.query_string);
		
		TopDocs docs = Definitions.wikipediaSearcher.search(q, maxHits);
		
		for (ScoreDoc scored : docs.scoreDocs) {
			Document d = Definitions.wikipediaSearcher.doc(scored.doc);			
			Candidate c = new Candidate(d.get("entity"), d.get("URL"), d.get("categories"), d.get("wiki_text"));
			c.lucene_score = scored.score;
			query.candidates.add(c);
		}
	}
}