package reaction.europarl.ranking;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import reaction.europarl.Definitions;
import reaction.europarl.Ranking;
import reaction.europarl.queries.QueryEL;
import reaction.europarl.queries.QueryELComparator;
import reaction.europarl.queries.candidates.Candidate;
import reaction.europarl.queries.candidates.CandidateComparator;
import reaction.europarl.queries.features.Features;
import reaction.util.misc.BigFile;

public class SVMRank {

	/**
	 * Writes {@code queries} to {@code outputfile} in SVMRank's input format.
	 *
	 * Each query is preceded by a comment line ("#&lt;query_id&gt; &lt;URL_answer&gt;");
	 * each candidate becomes one line holding a binary relevance label (1 for
	 * the correct entity, 0 otherwise), the numeric "qid:", the 1-based
	 * "index:value" feature pairs, and a trailing comment with the candidate
	 * URL. Feature values are normalized by dividing by the per-feature
	 * maximum over all candidates; features whose maximum is 0 are written
	 * unnormalized to avoid dividing by zero.
	 *
	 * @param queries    queries with their candidate entities; sorted in place
	 *                   by query id, because SVMRank requires increasing qids
	 * @param outputfile path of the file to write
	 * @throws IOException if the file cannot be written
	 */
	public static void svmRankFormat(List<QueryEL> queries, String outputfile) throws IOException {

		// SVMRank rejects files whose qids are not increasing
		// ("ERROR: Query ID's in data file have to be in increasing order"),
		// so sort the queries by id first.
		Collections.sort(queries, new QueryELComparator());

		// First pass: find each feature's maximum value, used below for
		// normalization. Sized from the first feature vector seen instead of
		// a hard-coded count; Java zero-initializes the array, so no explicit
		// clearing loop is needed.
		double[] max = null;
		for (QueryEL q : queries) {
			for (Candidate c : q.candidates) {
				double[] features = c.features.featuresVector();
				if (max == null)
					max = new double[features.length];
				for (int i = 0; i < features.length; i++) {
					if (features[i] > max[i])
						max[i] = features[i];
				}
			}
		}

		// try-with-resources guarantees the writer is closed even when a
		// write throws (the original leaked it on any exception).
		try (BufferedWriter out = new BufferedWriter(new FileWriter(outputfile))) {

			for (QueryEL q : queries) {

				out.write("#" + q.query_id + " " + q.URL_answer + "\n");

				for (Candidate c : q.candidates) {

					double[] vector = c.features.featuresVector();

					// relevance label: 1 if this candidate is the correct
					// entity, 0 otherwise; NIL queries have no correct one
					if (q.URL_answer.equalsIgnoreCase("NIL"))
						out.write("0"+" ");
					else {
						// compare only the last path segment of the answer URL
						String[] answer_parts = q.URL_answer.split("/");
						if (answer_parts[answer_parts.length-1].equalsIgnoreCase(c.URL))
							out.write("1"+" ");
						else out.write("0"+" ");
					}

					// numeric query identifier: the digits after the last '-'
					String[] query_id_parts = q.query_id.split("-");
					out.write("qid:"+Integer.parseInt(query_id_parts[query_id_parts.length-1])+" ");

					// 1-based "index:value" pairs, normalized by the feature max
					for (int i = 0; i < vector.length; i++) {
						if (max[i]==0)
							out.write((i+1)+":"+vector[i]+" ");
						else out.write((i+1)+":"+vector[i]/max[i]+" ");
					}
					out.write("#" + c.URL);
					out.write("\n");
				}
			}
		}
	}

	/**
	 * Runs an external command line and blocks until it terminates.
	 *
	 * IOExceptions are reported on stderr (best effort, matching the original
	 * train/test behavior); on interruption the thread's interrupt flag is
	 * restored so callers can still observe it. Guarding waitFor inside the
	 * same try also fixes the NPE the original risked when exec failed.
	 */
	private static void execAndWait(String command) {
		// TODO: capture the child process's stdout/stderr
		try {
			Process process = Runtime.getRuntime().exec(command);
			process.waitFor();
		} catch (IOException e) {
			e.printStackTrace();
		} catch (InterruptedException e) {
			// restore the interrupt status instead of swallowing it
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Trains an SVMRank model (C=3) from "svmrank-train.dat", writing it to
	 * "svmrank-trained-model.dat". Errors are reported but not thrown.
	 */
	public static void train() {

		String learn_arguments = "-c 3 svmrank-train.dat svmrank-trained-model.dat";
		String command = Definitions.SVMRankPath+Definitions.SVMRanklLearn+' '+learn_arguments;

		System.out.println("\nTraining SVMRank model: ");
		System.out.println(command);

		execAndWait(command);
	}

	/**
	 * Applies the trained model "svmrank-trained-model.dat" to
	 * "svmrank-test.dat", writing one score per candidate line to
	 * "svmrank-predictions". Errors are reported but not thrown.
	 */
	public static void test() {

		String classify_arguments = "svmrank-test.dat svmrank-trained-model.dat svmrank-predictions";
		String command = Definitions.SVMRankPath+Definitions.SVMRanklClassify+' '+classify_arguments;

		System.out.println("\nTesting SVMRank model: ");
		System.out.println(command);

		execAndWait(command);
	}

	/**
	 * Copies the predicted answers into Ranking.Testqueries (both lists are
	 * assumed aligned by index, verified per entry via query_id) and prints
	 * accuracy over all queries, over non-NIL (PER) queries, and over NIL
	 * queries. Queries without ranked candidates are excluded from the counts.
	 *
	 * NOTE(review): a category with zero evaluated queries prints NaN
	 * (0.0f/0.0f), preserved from the original behavior.
	 *
	 * @param answers queries carrying the system's answer for each test query
	 */
	public static void accuracy(List<QueryEL> answers) throws Exception {

		// transfer answers onto the globally held test queries, matched by position
		for (int j = 0; j < Ranking.Testqueries.size(); j++) {
			if (answers.get(j).query_id.equalsIgnoreCase(Ranking.Testqueries.get(j).query_id)) {
				Ranking.Testqueries.get(j).answer = answers.get(j).answer;
			}
		}

		int NIL_correct = 0;
		int PER_correct = 0;
		int number_NIL = 0;
		int number_PER = 0;

		for (QueryEL q : Ranking.Testqueries) {

			// only queries for which some candidate was ranked are evaluated
			if (q.candidatesRanked.size()>0) {

				if (!q.URL_answer.equalsIgnoreCase("NIL")) {
					// entity query: correct when the answer completes the wiki URL
					number_PER += 1;
					if (q.URL_answer.equalsIgnoreCase("http://pt.wikipedia.org/wiki/"+q.answer)) {
						PER_correct += 1;
					}
				}

				else {
					// NIL query: correct when the system also answered "NIL"
					number_NIL += 1;
					if (q.URL_answer.equalsIgnoreCase(q.answer)) {
						NIL_correct += 1;
					}
				}
			}
		}

		System.out.println("Accurracy:");
		System.out.println("ALL:" + (float) (PER_correct+NIL_correct)/ (float) (number_PER+number_NIL));
		System.out.println("PER:" + (float) PER_correct/ (float) number_PER);
		System.out.println("NIL:" + (float) NIL_correct/ (float) number_NIL);
	}

	/**
	 * Joins SVMRank's predictions file with the ground-truth/feature file it
	 * was produced from, rebuilding one QueryEL per "#&lt;query_id&gt; &lt;answer&gt;"
	 * header line and one Candidate (carrying its ranking score and entity
	 * id) per data line.
	 *
	 * The i-th non-header line of the ground-truth file corresponds to the
	 * i-th line of the predictions file, so header lines must not advance i.
	 *
	 * @param predictionsFilePath file with one SVMRank score per line
	 * @param goundtruthFilePath  the SVMRank input file the scores refer to
	 * @param predictions         out-parameter: line index -&gt; score
	 * @param queries             out-parameter: the reconstructed queries
	 */
	public static void parse(String predictionsFilePath, String goundtruthFilePath, HashMap<Integer, Double> predictions, List<QueryEL> queries) throws Exception{

		BigFile predictionsFile = new BigFile(predictionsFilePath);
		BigFile goundtruthFile = new BigFile(goundtruthFilePath);
		int i=0;

		// one ranking score per candidate line, keyed by its position
		for (String line : predictionsFile) {
			predictions.put(i, Double.parseDouble(line));
			i++;
		}

		i=0;
		QueryEL q = null;

		for (String line: goundtruthFile) {

			if (line.startsWith("#")) {
				// header line: flush the previous query (if any) and start a new one
				if (q!=null) {
					queries.add(q);
				}

				// Extracts the query ID and the correct entity's ID,
				// e.g. "#EL00003 E0182788"
				String[] data = line.split("\\s");
				String query_id = data[0].split("#")[1];
				String correct_answer = data[1];

				// builds a new query and fills the query_id and correct answer
				q = new QueryEL();
				q.query_id = query_id;
				q.URL_answer = correct_answer;

				continue; // header lines must not advance the prediction index i
			}

			else  {
				String[] data = line.split("\\s");

				// the entity id is the line's trailing comment, e.g. "#E0554903"
				String entity = data[(data.length)-1].split("#")[1];

				Candidate c = new Candidate();
				c.ranking_score = predictions.get(i);
				c.entity = entity;
				q.candidates.add(c);

				// TODO: also parse the feature values from this line into the
				// Candidate's Features object
			}
			i++;
		}

		// flush the last query; the null guard prevents adding null for a
		// completely empty ground-truth file (the original did not check)
		if (q != null) {
			queries.add(q);
		}
	}

	/**
	 * Parses SVMRank's output and returns the queries with their candidates
	 * ranked according to the classification scores.
	 *
	 * @param predictionsFilePath file with one SVMRank score per line
	 * @param goundtruthFilePath  the SVMRank input file the scores refer to
	 * @return the reconstructed queries, each with candidatesRanked populated
	 */
	public static List<QueryEL> gatherResults(String predictionsFilePath, String goundtruthFilePath) throws Exception {

		HashMap<Integer, Double> predictions = new HashMap<Integer, Double>();
		List<QueryEL> queries = new LinkedList<QueryEL>();

		parse(predictionsFilePath,goundtruthFilePath,predictions,queries);

		// rank candidates according to classification scores
		for (QueryEL q : queries) {
			q.candidatesRanked = new ArrayList<Candidate>(q.candidates);
			Collections.sort(q.candidatesRanked, new CandidateComparator());
		}

		return queries;
	}

	/**
	 * Applies the trained model to the TRAINING queries to obtain ranking
	 * scores; those scores are later used to extract features for training an
	 * SVM classifier that detects NIL queries. The ranked candidate lists are
	 * copied back into Ranking.Trainqueries (assumed aligned by index,
	 * verified per entry via query_id).
	 *
	 * @throws Exception if running SVMRank or parsing its output fails
	 */
	public static void generateRankingScores() throws Exception {

		System.out.println("\nCalculating ranking scores for train queries to extract features for SVMLight");
		String classify_arguments = "svmrank-train.dat svmrank-trained-model.dat svmrank-predictions_training_set";
		String command = Definitions.SVMRankPath+Definitions.SVMRanklClassify+' '+classify_arguments;
		System.out.println(command);

		// unlike train()/test(), failures here propagate to the caller
		Process svmRankClassify = Runtime.getRuntime().exec(command);
		svmRankClassify.waitFor();

		// parse SVMRank's output and rank the candidates by score; these are
		// exactly the steps of gatherResults, so reuse it instead of
		// duplicating the parse+sort logic (as the original did)
		List<QueryEL> queries = gatherResults("svmrank-predictions_training_set", "svmrank-train.dat");

		// copy the ranked candidate lists back into Ranking.Trainqueries
		for (int i = 0; i < Ranking.Trainqueries.size(); i++) {
			if  (queries.get(i).query_id.equalsIgnoreCase(Ranking.Trainqueries.get(i).query_id)) {
				// create the ranked list on the train query from the scored copy
				Ranking.Trainqueries.get(i).candidatesRanked = new ArrayList<Candidate>(queries.get(i).candidatesRanked);
			}
		}
	}
}