/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package ufpr.mestrado.ais.experiments.util;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import jmetal.base.Algorithm;
import jmetal.base.Problem;
import jmetal.base.SolutionSet;
import jmetal.experiments.Experiment;
import jmetal.experiments.Settings;
import jmetal.util.JMException;
import ufpr.mestrado.ais.experiments.ExperimentMNO;
import ufpr.mestrado.ais.problems.ProblemFactoryMNO;
import ufpr.mestrado.ais.qualityIndicator.QualityIndicatorMNO;
import ufpr.mestrado.ais.util.PrintAndreFile;

/**
 * 
 * @author antonio
 */
/**
 * Worker thread that executes a contiguous slice of an experiment's problem
 * list: for each assigned problem it instantiates the configured algorithms,
 * performs the independent runs, writes the Pareto front/set files, dumps a
 * Tchebycheff aggregation file per algorithm, and computes the requested
 * quality indicators (HV, SPREAD, IGD, EPSILON, GD).
 *
 * <p>All configuration is passed through the {@code map_} of the owning
 * {@link ExperimentMNO}. Access to the shared {@code paretoFrontFile_} map is
 * synchronized on {@code experiment_} because several worker threads may
 * resolve Pareto-front file paths concurrently.
 *
 * @author antonio
 */
public class runExperimentMNO extends Thread {

	public ExperimentMNO experiment_;
	public int id_;
	public HashMap<String, Object> map_;
	public int numberOfThreads_;
	public int numberOfProblems_;

	int first_; // index of the first problem handled by this thread
	int last_; // index of the last problem handled by this thread (inclusive)

	String experimentName_;

	String[][] algorithmNameList_;

	String[] problemList_; // List of problems to be solved

	Object[][][] problemConfigList_; // Per-problem/per-algorithm configuration

	// Pareto-front file per problem name, keyed by number of objectives
	HashMap<String, HashMap<Integer, String>> paretoFrontFile_;

	// corresponding to the problems in problemList_
	String[] indicatorList_; // List of the quality indicators to be applied

	String experimentBaseDirectory_; // Directory to store the results

	String latexDirectory_; // Directory to store the latex files

	String rDirectory_; // Directory to store the generated R scripts

	String paretoFrontDirectory_; // Directory containing the Pareto front files

	String outputParetoFrontFile_; // Name of the file containing the output
	// Pareto front

	String outputParetoSetFile_; // Name of the file containing the output
	// Pareto set

	int independentRuns_; // Number of independent runs per algorithm

	Settings[] algorithmSettings_; // Parameter settings of each algorithm

	/**
	 * Creates a worker responsible for one partition of the problem list.
	 * Problems are split evenly among {@code numberOfThreads} workers; the
	 * last worker (highest id) additionally absorbs any remainder so that
	 * every problem index is covered.
	 *
	 * @param experiment       owning experiment (also used as the shared lock)
	 * @param map              configuration map produced by the experiment
	 * @param id               zero-based id of this worker
	 * @param numberOfThreads  total number of workers
	 * @param numberOfProblems total number of problems to distribute
	 */
	public runExperimentMNO(ExperimentMNO experiment,
			HashMap<String, Object> map, int id, int numberOfThreads,
			int numberOfProblems) {

		super.setName(experiment.getClass().getName() + " | " + id);

		experiment_ = experiment;

		id_ = id;

		map_ = map;

		numberOfThreads_ = numberOfThreads;

		numberOfProblems_ = numberOfProblems;

		int partitions = numberOfProblems / numberOfThreads;

		first_ = partitions * id;

		if (id == (numberOfThreads - 1)) {

			// Last worker takes the remainder of an uneven division.
			last_ = numberOfProblems - 1;

		} else {

			last_ = first_ + partitions - 1;
		}

		Logger.getLogger(getClass().getName()).info(
				"Id: " + id + "  Partitions: " + partitions + " First: "
						+ first_ + " Last: " + last_);
	}

	/**
	 * Executes the assigned problems: configures each problem and its
	 * algorithms, performs {@code independentRuns_} runs per algorithm,
	 * writes result files, and finally computes the quality indicators.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void run() {

		Algorithm[] algorithm; // jMetal algorithms to be executed

		String experimentName = (String) map_.get("experimentName");

		experimentBaseDirectory_ = (String) map_.get("experimentDirectory");

		algorithmNameList_ = (String[][]) map_.get("algorithmNameList");

		problemList_ = (String[]) map_.get("problemList");

		problemConfigList_ = (Object[][][]) map_.get("problemConfigList");

		indicatorList_ = (String[]) map_.get("indicatorList");

		paretoFrontDirectory_ = (String) map_.get("paretoFrontDirectory");

		paretoFrontFile_ = (HashMap<String, HashMap<Integer, String>>) map_
				.get("paretoFrontFile");

		independentRuns_ = (Integer) map_.get("independentRuns");

		outputParetoFrontFile_ = (String) map_.get("outputParetoFrontFile");

		outputParetoSetFile_ = (String) map_.get("outputParetoSetFile");

		int numberOfAlgorithms = algorithmNameList_[0].length;

		algorithm = new Algorithm[numberOfAlgorithms];

		SolutionSet[][] resultFront = new SolutionSet[independentRuns_][numberOfAlgorithms];

		Logger.getLogger(getClass().getName()).info(
				"Experiment: Number of algorithms: " + numberOfAlgorithms);

		Logger.getLogger(getClass().getName()).info(
				"Experiment: runs: " + independentRuns_);

		Logger.getLogger(getClass().getName())
				.info("Nombre: " + experimentName);

		Logger.getLogger(getClass().getName()).info(
				"experimentDirectory: " + experimentBaseDirectory_);

		Logger.getLogger(getClass().getName()).info(
				"numberOfThreads_: " + numberOfThreads_);

		Logger.getLogger(getClass().getName()).info(
				"numberOfProblems_: " + numberOfProblems_);

		Logger.getLogger(getClass().getName()).info("first: " + first_);

		Logger.getLogger(getClass().getName()).info("last: " + last_);

		for (int problemId = first_; problemId <= last_; problemId++) {

			final Problem[] problem = configureProblem(problemId, algorithm);

			for (int runs = 0; runs < independentRuns_; runs++) {

				for (int i = 0; i < numberOfAlgorithms; i++) {

					Logger.getLogger(getClass().getName()).fine(
							algorithm[i].getClass().toString());
					// STEP 6: create output directories

					String directory = experimentBaseDirectory_ + "/data/"
							+ algorithmNameList_[problemId][i] + "/"
							+ problemList_[problemId];

					File experimentDirectory = new File(directory);

					if (!experimentDirectory.exists()) {

						experimentDirectory.mkdirs();

						Logger.getLogger(getClass().getName()).fine(
								"Creating " + directory);
					}

					// STEP 7: run the algorithm
					Logger.getLogger(getClass().getName()).info(
							"Running algorithm: "
									+ algorithmNameList_[problemId][i]
									+ ", problem: " + problemList_[problemId]
									+ ", run: " + runs);
					try {
						// Reset the evaluation counter so each run starts
						// from a clean fitness-evaluation budget.
						problem[i].resetTotalFitnessEvaluation();

						resultFront[runs][i] = algorithm[i].execute();

					} catch (ClassNotFoundException e) {
						Logger.getLogger(Experiment.class.getName()).log(
								Level.SEVERE, "Erro", e);
					} catch (JMException ex) {
						Logger.getLogger(Experiment.class.getName()).log(
								Level.SEVERE, null, ex);
					}

					// STEP 8: put the results in the output directory.
					// FIX: execute() may have failed above (exception was
					// logged); a null entry would previously crash the whole
					// worker thread with a NullPointerException here.
					if (resultFront[runs][i] != null) {
						resultFront[runs][i].printObjectivesToFile(directory
								+ "/" + outputParetoFrontFile_ + "-" + runs
								+ ".txt");
						resultFront[runs][i].printVariablesToFile(directory
								+ "/" + outputParetoSetFile_ + "-" + runs
								+ ".txt");
					}
				}
			}

			final PrintAndreFile andreFile = new PrintAndreFile();
			// Dump the Tchebycheff aggregation of all runs, per algorithm.
			for (int i = 0; i < numberOfAlgorithms; i++) {

				final SolutionSet[] solutionSets = new SolutionSet[independentRuns_];

				String path = experimentBaseDirectory_ + "/data/"
						+ algorithmNameList_[problemId][i] + "/"
						+ problemList_[problemId] + "/tchebycheff.txt";

				for (int runs = 0; runs < independentRuns_; runs++) {
					solutionSets[runs] = resultFront[runs][i];
				}

				andreFile.printObjectivesToTchebycheff(path, solutionSets);
			}

			calcQualityIndicators(problemId, numberOfAlgorithms, problem,
					resultFront);

		} // for problemId
	}

	/**
	 * Computes every indicator in {@code indicatorList_} for every algorithm
	 * and run of the given problem, appending one value per run to
	 * {@code <dataDir>/<INDICATOR>.txt}. Skipped entirely when no reference
	 * Pareto front is registered for the problem. Indicator objects are
	 * cached per number-of-objectives since the reference front depends only
	 * on that dimension.
	 *
	 * @param problemId          index of the problem in {@code problemList_}
	 * @param numberOfAlgorithms number of algorithms compared
	 * @param problem            configured problem instances, one per algorithm
	 * @param resultFront        fronts indexed as [run][algorithm]
	 */
	private void calcQualityIndicators(final int problemId,
			final int numberOfAlgorithms, final Problem[] problem,
			final SolutionSet[][] resultFront) {

		// STEP 9: calculate quality indicators
		for (int pointer_qi = 0; paretoFrontFile_.get(problemList_[problemId]) != null
				&& pointer_qi < indicatorList_.length; pointer_qi++) {

			final HashMap<Integer, QualityIndicatorMNO> map = new HashMap<Integer, QualityIndicatorMNO>();

			for (int pointer_algorithms = 0; pointer_algorithms < numberOfAlgorithms; pointer_algorithms++) {

				final String directory = experimentBaseDirectory_ + "/data/"
						+ algorithmNameList_[problemId][pointer_algorithms]
						+ "/" + problemList_[problemId];

				final File experimentDirectory = new File(directory);

				final String paretoFile = paretoFrontFile_.get(
						problemList_[problemId]).get(
						problem[pointer_algorithms].getNumberOfObjectives());

				Logger.getLogger(getClass().getName()).fine(
						"PF file: " + paretoFile);

				Logger.getLogger(getClass().getName()).info(
						algorithmNameList_[problemId][pointer_algorithms]
								+ " - Running: " + indicatorList_[pointer_qi]);

				// Reuse one indicator object per objective count.
				QualityIndicatorMNO indicators = map
						.get(problem[pointer_algorithms]
								.getNumberOfObjectives());

				if (indicators == null) {

					indicators = new QualityIndicatorMNO(
							problem[pointer_algorithms], paretoFile);

					map.put(problem[pointer_algorithms].getNumberOfObjectives(),
							indicators);
				}

				for (int pointer_runs = 0; pointer_runs < independentRuns_; pointer_runs++) {

					Logger.getLogger(getClass().getName()).fine(
							indicatorList_[pointer_qi] + " - Run: "
									+ pointer_runs);

					FileWriter os = null;

					double value = 0;

					try {
						if (indicatorList_[pointer_qi].equals("HV")) {

							value = indicators
									.getHypervolume(resultFront[pointer_runs][pointer_algorithms]);

						} else if (indicatorList_[pointer_qi].equals("SPREAD")) {

							value = indicators
									.getSpread(resultFront[pointer_runs][pointer_algorithms]);

						} else if (indicatorList_[pointer_qi].equals("IGD")) {

							value = indicators
									.getIGD(resultFront[pointer_runs][pointer_algorithms]);

						} else if (indicatorList_[pointer_qi].equals("EPSILON")) {

							value = indicators
									.getEpsilon(resultFront[pointer_runs][pointer_algorithms]);

						} else if (indicatorList_[pointer_qi].equals("GD")) {

							value = indicators
									.getGD(resultFront[pointer_runs][pointer_algorithms]);

						}

						// Append the value to the per-indicator results file.
						os = new FileWriter(experimentDirectory + "/"
								+ indicatorList_[pointer_qi] + ".txt", true);

						os.write("" + value + "\n");

					} catch (IOException ex) {

						Logger.getLogger(getClass().getName()).log(
								Level.SEVERE, null, ex);

					} finally {

						// FIX: the original called os.close() unguarded - if
						// the FileWriter constructor (or an indicator getter)
						// threw, os was still null and the finally block
						// raised a NullPointerException that masked the real
						// error. It also closed os twice on success.
						if (os != null) {
							try {

								os.close();

							} catch (IOException ex) {

								Logger.getLogger(getClass().getName()).log(
										Level.SEVERE, null, ex);
							}
						}
					}
				} // for pointer_runs
			} // for pointer_algorithms
		} // for pointer_qi
	}

	/**
	 * Builds one {@link Problem} instance per configuration row of the given
	 * problem, resolves the reference Pareto-front file for each objective
	 * count (first as an absolute path, then relative to
	 * {@code paretoFrontDirectory_}, falling back to ""), and lets the
	 * experiment configure the algorithm instances.
	 *
	 * <p>The Pareto-front resolution mutates the shared
	 * {@code paretoFrontFile_} map, hence the synchronization on
	 * {@code experiment_}.
	 *
	 * @param problemId index of the problem in {@code problemList_}
	 * @param algorithm output array filled by the experiment's settings
	 * @return the configured problem instances (entries may remain null if
	 *         the factory threw, which is only logged - matching the
	 *         pre-existing behavior)
	 */
	private Problem[] configureProblem(final int problemId,
			final Algorithm[] algorithm) {

		final Problem[] problem = new Problem[problemConfigList_[problemId].length];

		for (int pointer_param = 0; pointer_param < problemConfigList_[problemId].length; pointer_param++) {

			try {
				// Parameters of the problem
				problem[pointer_param] = (new ProblemFactoryMNO()).getProblem(
						problemList_[problemId],
						problemConfigList_[problemId][pointer_param]);

			} catch (JMException ex) {
				Logger.getLogger(getClass().getName()).log(Level.SEVERE, null,
						ex);
			}
		}
		// STEP 3: check the file containing the Pareto front of the problem
		synchronized (experiment_) {

			if (indicatorList_.length > 0) {

				for (int pointer_param = 0; pointer_param < problemConfigList_[problemId].length; pointer_param++) {

					File pfFile = new File(paretoFrontFile_.get(
							problemList_[problemId]).get(
							problem[pointer_param].getNumberOfObjectives()));

					if (pfFile.exists()) {

						// Registered path is already valid; normalize it to
						// an absolute path.
						paretoFrontFile_.get(problemList_[problemId]).put(
								problem[pointer_param].getNumberOfObjectives(),
								pfFile.getAbsoluteFile().toString());

					} else {

						// Retry relative to the configured front directory.
						pfFile = new File(paretoFrontDirectory_
								+ "/"
								+ paretoFrontFile_.get(problemList_[problemId])
										.get(problem[pointer_param]
												.getNumberOfObjectives()));
						if (pfFile.exists()) {

							paretoFrontFile_
									.get(problemList_[problemId])
									.put(problem[pointer_param]
											.getNumberOfObjectives(),
											pfFile.getAbsoluteFile().toString());

						} else {

							// No reference front available: store "" so the
							// indicator stage can detect the absence.
							paretoFrontFile_
									.get(problemList_[problemId])
									.put(problem[pointer_param]
											.getNumberOfObjectives(),
											"");
						}
					}
				}
			} // if

		}
		try {

			experiment_.algorithmSettings(problem, problemId, algorithm);

		} catch (ClassNotFoundException e1) {

			Logger.getLogger(getClass().getName()).log(Level.SEVERE, null, e1);

		}

		return problem;
	}
}
