package main;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.math.BigDecimal;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.joda.time.LocalDate;

import weka.classifiers.Evaluation;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.core.Instances;
import data.InstancesCreator;
import data.combinations.IndexesCombinations;
import data.enums.ArchiveDescription;
import data.enums.Folder;
import data.enums.Index;
import data.input.Data;
import data.transform.DatasMap;
import entities.box_plot.TableBoxPlotByEpochs;
import entities.box_plot.TableBoxPlotByLearningRate;
import entities.box_plot.TableBoxPlotByMomentum;
import entities.box_plot.TableBoxPlotByNeurons;
import entities.box_plot.TableMlpTestsOneIndexByContinent;
import entities.tables.TableGroupedByEqualParameters;

public class Main {

	/** Number of folds for k-fold cross-validation (Weka "folds" — not file system folders). */
	private static final int NUMBER_OF_FOLDS = 5;

	// Hyper-parameter grid swept by createTableMlp(): every combination of
	// hidden-layer size x learning rate x momentum x training epochs is evaluated.
	static int[] numsNeuronios = { 2, 3, 4, 5 };
	static double[] learningsRate = { 0.2, 0.3, 0.4 };
	static double[] momentums = { 0.1, 0.2, 0.3 };
	// Epochs swept: firstEpoch, firstEpoch + intervalEpoch, ..., lastEpoch (inclusive).
	static int firstEpoch = 400;
	static int intervalEpoch = 100;
	static int lastEpoch = 600;

	// Tag identifying which ARFF layout / experiment variant the output tables belong to.
	static ArchiveDescription archiveDescription = ArchiveDescription.DEFAULT_WEKA_WITH_ONE_STEP;

	/**
	 * Pipeline driver. The three stages are run manually by (un)commenting:
	 * 1) createArffs()      — build ARFF files from raw index data;
	 * 2) createTableMlp()   — run the MLP grid search and serialize the result table;
	 * 3) deserialization +  createTablesWithAnSerializedTableMlp() — derive report tables.
	 * Prints elapsed wall-clock time in seconds/minutes/hours (Portuguese messages).
	 */
	public static void main(String[] args) throws Exception {

		long startTime = System.currentTimeMillis();

		// Earlier pipeline stages; enable as needed.
		//createArffs();
		//createTableMlp();

		TableMlpTestsOneIndexByContinent tableMlp = new TableMlpTestsOneIndexByContinent(archiveDescription);
		tableMlp = tableMlp.deserialize();
		createTablesWithAnSerializedTableMlp(tableMlp);

		long elapsedMillis = System.currentTimeMillis() - startTime;
		System.out.println("rodou em " + (elapsedMillis / 1000) + " segundos");
		System.out.println("rodou em " + (elapsedMillis / (1000 * 60)) + " minutos");
		System.out.println("rodou em " + (elapsedMillis / (1000 * 60 * 60)) + " horas");

	}

	/**
	 * Writes every derived report (raw table plus the four box-plot aggregations)
	 * of a previously serialized MLP results table to text files.
	 *
	 * @param tableMlp deserialized grid-search results
	 * @throws IOException if any of the text files cannot be written
	 */
	private static void createTablesWithAnSerializedTableMlp(TableMlpTestsOneIndexByContinent tableMlp) throws IOException {

		tableMlp.saveTableAsTxt();

		TableBoxPlotByEpochs boxPlotByEpochs = new TableBoxPlotByEpochs(tableMlp, archiveDescription);
		boxPlotByEpochs.saveTableAsTxt();

		TableBoxPlotByMomentum boxPlotByMomentum = new TableBoxPlotByMomentum(tableMlp, archiveDescription);
		boxPlotByMomentum.saveTableAsTxt();

		TableBoxPlotByNeurons boxPlotByNeurons = new TableBoxPlotByNeurons(tableMlp, archiveDescription);
		boxPlotByNeurons.saveTableAsTxt();

		TableBoxPlotByLearningRate boxPlotByLearningRate = new TableBoxPlotByLearningRate(tableMlp, archiveDescription);
		boxPlotByLearningRate.saveTableAsTxt();

		// NOTE(review): unlike its four siblings, this table is built but never saved.
		// Confirm whether the constructor persists it as a side effect, or whether a
		// groupedByParameters.saveTableAsTxt() call is missing here.
		TableGroupedByEqualParameters groupedByParameters = new TableGroupedByEqualParameters(tableMlp, archiveDescription);

	}

	/**
	 * Runs the full hyper-parameter grid search: for every *.arff file in the
	 * one-index-by-continent folder, cross-validates a MultilayerPerceptron for each
	 * combination of neurons / learning rate / momentum / epochs, collects the
	 * percent-correct into the results table, and serializes the table at the end.
	 *
	 * @throws Exception propagated from Weka (model building / evaluation failures)
	 */
	private static void createTableMlp() throws Exception {

		File folder = new File(Folder.ONE_INDEX_BY_CONTINENT_ARFFS.toString());

		File[] inputs = folder.listFiles(new FilenameFilter() {

			public boolean accept(File dir, String name) {
				return (name.endsWith(".arff"));
			}

		});

		// listFiles returns null when the path does not exist or is not a directory;
		// fail loudly instead of with an opaque NullPointerException.
		if (inputs == null) {
			throw new IOException("ARFF folder not found or not a directory: " + folder.getAbsolutePath());
		}

		TableMlpTestsOneIndexByContinent tableMlp = new TableMlpTestsOneIndexByContinent(archiveDescription);

		int contador = 0;
		for (File input : inputs) {

			// try-with-resources: the original leaked this FileReader.
			Instances allInstances;
			try (FileReader reader = new FileReader(input)) {
				allInstances = new Instances(reader);
			}
			// Weka convention: the class attribute is the last one in the ARFF.
			allInstances.setClassIndex(allInstances.numAttributes() - 1);

			for (int numsEpoch = firstEpoch; numsEpoch <= lastEpoch; numsEpoch += intervalEpoch) {
				for (int numNeuronios : numsNeuronios) {
					for (double learningRate : learningsRate) {
						for (double momentum : momentums) {

							MultilayerPerceptron mlp = new MultilayerPerceptron();

							mlp.setHiddenLayers(numNeuronios + "");
							mlp.setLearningRate(learningRate);
							mlp.setMomentum(momentum);
							mlp.setTrainingTime(numsEpoch);

							// BUG FIX: a fresh Evaluation per model. Weka's Evaluation
							// accumulates statistics across crossValidateModel calls, so the
							// original single shared instance made pctCorrect() report a
							// running mixture of ALL previously tested configurations.
							Evaluation evaluation = new Evaluation(allInstances);
							// Fixed seed (Weka's documented convention) so the fold split —
							// and therefore the whole grid search — is reproducible.
							evaluation.crossValidateModel(mlp, allInstances, NUMBER_OF_FOLDS, new Random(1));

							BigDecimal percentualCorrectBigDecimal = BigDecimal.valueOf(evaluation.pctCorrect());

							tableMlp.addRow(input.getName().replace(".arff", ""), numNeuronios, learningRate, momentum, numsEpoch, percentualCorrectBigDecimal);
							System.out.println(contador++);
						}
					}
				}

			}
		}

		tableMlp.serialize();

	}

	/**
	 * Builds one Weka Instances set per index combination over the fixed date range
	 * and saves each as an ARFF file in the one-index-by-continent folder.
	 *
	 * @throws ClassNotFoundException propagated from deserializing cached data
	 * @throws IOException            if an ARFF file cannot be written
	 * @throws ParseException         propagated from parsing the raw input data
	 */
	private static void createArffs() throws ClassNotFoundException, IOException, ParseException {

		Map<Index, Data> datasMap = DatasMap.datasMap();
		Set<List<Index>> combinations = IndexesCombinations.getOneIndexByContinent();

		// Fixed experiment window (was "06" in the original — a leading zero makes an
		// int literal octal in Java; harmless for 6 but a trap, so written plainly).
		LocalDate initialDate = new LocalDate(2005, 6, 1);
		LocalDate finalDate = new LocalDate(2012, 4, 5);

		InstancesCreator creator = new InstancesCreator(initialDate, finalDate, datasMap, combinations);

		while (creator.hasNextInstances()) {

			Instances instances = creator.nextInstances();
			saveOneIndexByContinentArffs(instances);

		}

	}

	/**
	 * Derives the ARFF file name from the relation name (e.g. "[A, B]" becomes
	 * "A_and_B.arff") and saves the instances under the one-index-by-continent folder.
	 *
	 * @param instances data set to persist; its relation name drives the file name
	 * @throws IOException if the file cannot be written
	 */
	private static void saveOneIndexByContinentArffs(Instances instances) throws IOException {
		saveArffsInThePath(Folder.ONE_INDEX_BY_CONTINENT_ARFFS + instances.relationName().replace(", ", "_and_").replace("[", "").replace("]", "") + ".arff", instances);
	}

	/**
	 * Writes the textual ARFF representation of the instances to the given path.
	 *
	 * @param completePath full target path including the .arff extension
	 * @param instances    data set whose toString() is the ARFF content
	 * @throws IOException if the file cannot be created or written
	 */
	private static void saveArffsInThePath(String completePath, Instances instances) throws IOException {

		// try-with-resources closes (and flushes) the writer even if write() throws;
		// the original leaked it on exception and flushed redundantly before close.
		try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(completePath)))) {
			writer.write(instances.toString());
		}

	}

}
