import ai.neuralnet.*;
import ai.neuralnet.builders.ConnectedMultilayerBuilder;
import ai.neuralnet.builders.DebugNetworkBuilder;
import ai.neuralnet.builders.OverlappingPyramidBuilder;
import ai.neuralnet.gen.GeneticNeuralNetwork;
import ai.neuralnet.gen.GeneticTrainer;
import ai.neuralnet.gradientDescent.GDMode;
import ai.neuralnet.gradientDescent.GDNeuralNetwork;
import ai.neuralnet.gradientDescent.GDNeuron;
import ai.neuralnet.gradientDescent.GradientDescentTrainer;
import ai.neuralnet.mixed.MixedTrainer;
import data.DataPreProcessor;
import data.NormalizationStyle;
import musicalgo.NeuralNetworkAlgorithm;
import musicalgo.VotingNNAlgorithm;

//todo: Automatic feature selection (not ai approach just based on information from data)
//todo: Voting with experts
//todo: Implement parallelism optimization for training

public class Main
{


    /**
     * Preconfigured algorithm combination index to run (0-11).
     * See the switch in main for the index-to-combination mapping.
     *
     * @see Main#main(String[])
     */
    static int algorithm = 8;


    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
    *                                                               *
    * General Options common to all algorithms can be found here    *
    *                                                               *
    *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */

    /**
     * Feature selection mask with no pitch information
     */
    static boolean[] noPitch = new boolean[]{true, false, false, false, false, false, false, false, false, false, false, false, false,
            true, true, true, true, true, true, true, true, true, true, true, true};

    /**
     * Feature selection mask with no timbre information
     */
    static boolean[] noTimbre = new boolean[]{true, true, true, true, true, true, true, true, true, true, true, true, true,
            false, false, false, false, false, false, false, false, false, false, false, false};

    /**
     * Feature selection mask that will be used by the algorithms;
     * if null then all 25 features are included
     */
    static boolean[] selectedFeatures = null;

    /**
     * If true then the algorithms will attempt to
     * automatically select the most relevant features
     */
    static boolean automaticFeatureSelection = false;


    /**
     * Normalization style to apply to the data
     */
    static NormalizationStyle normalizationStyle = NormalizationStyle.UnitMagnitude;

    /**
     * Percent overlap of song segments when doing time series merging
     */
    static double segmentMergeOverlap = 0.1;


    /**
     * Number of segments per song after data preprocessing.
     * [1 - 200]
     */
    static int desiredSegments = 1;

    /**
     * Total number of songs to use for both training and validation.
     * [1 - 2500]
     */
    static int totalSongs = 2500;

    /**
     * Percent of totalSongs that will form the training set;
     * the complementary set will be used for validation.
     * Documented range is ]0, 1[ (exclusive).
     * NOTE(review): the current value 1 leaves no songs for validation —
     * confirm this is intentional.
     */
    static double trainingPercentage = 1;

    /**
     * If the algorithms should print regular progress updates
     * when training networks
     */
    static boolean printProgress = true;

    /**
     * Number of generations or GD iterations to wait between status
     * update prints when printProgress is true
     */
    static int printPeriod = 20;

    /**
     * Suggested number of maximum iterations/generations to train a neural net for.
     * Certain algorithms may modify this throughout their execution.
     *
     * @see MixedTrainer#trainNetworks() mixed network training
     */
    static int maxIterations = 7000;

    /**
     * Suggested target accuracy to stop training at.
     * Certain algorithms may modify this throughout their execution.
     */
    static double targetAccuracy = 0.75;

    /**
     * Number of NeuralNetworks to train before the validation phase,
     * or the number of networks per family for voting
     */
    static int trainingCount = 3;

    /**
     * Minimum value of a neuron input weight during random generation
     */
    static double minRandomWeight = -1;

    /**
     * Maximum value of a neuron input weight during random generation
     */
    static double maxRandomWeight = 1;


    /*   *   *   *   *   *   *   *   *   *   *   *   *
    *                                               *
    * Options for Voting algorithm are below here   *
    *                                               *
    *   *   *   *   *   *   *   *   *   *   *   *   */

    /**
     * The voting algorithm can cluster groups of voters into families.
     * These families are of size trainingCount and will be trained on different
     * but partially overlapping subsets of songs of size
     * ~ 1.1 * trainingPercentage / families * totalSongs
     */
    static int families = 6;


    /*    *   *   *   *   *   *   *   *   *   *   *   *   *
    *                                                    *
    * Gradient Descent specific options are below here   *
    *                                                    *
    *    *   *   *   *   *   *   *   *   *   *   *   *   */


    /**
     * If true GD starts by generating a number of random networks and then picks the best one before
     * entering the GD training loop.
     */
    static boolean jumpStart = true;

    /**
     * Factor representing how large the initial random pool of networks is when using jump start on GD
     */
    static int jumpFactor = 2;

    /**
     * Learning rate for the GD algorithm
     */
    static double learningRate = 0.2;

    /**
     * If true the learning rate will slowly decrease over time.
     * The intuition is that at first you can jump around, but
     * towards the end you want things to converge.
     */
    static boolean reduceLR = false;

    /**
     * Initial period (number of iterations) after which the learning rate is reduced
     * when reduceLR is true. (The algorithm lengthens this period by 10% after each
     * reduction.)
     */
    static int lrDecrementPeriod = 200;

    /**
     * If true, the algorithm will try to detect when we are stalled at a local minimum
     * and will stop training even if the max iterations and target accuracy have not been reached
     */
    static boolean stopOnNoProgress = false;

    /**
     * Number of iterations between checks for progress.
     */
    static int progressCheckPeriod = 150;


    /**
     * Update strategy for gradient descent:
     * either batch or online.
     */
    static GDMode gdUpdateMode = GDMode.Batch;


    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
     *                                                           *
     * Genetic Algorithm training options are below here         *
     *                                                           *
     *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */

    /**
     * Size of population groups. The algorithm maintains the population
     * at this level during evolution (or the next even number if odd).
     * NOTE(review): this is the only option declared private — consider
     * aligning its visibility with the other options.
     */
    private static int populationSize = 10;

    /**
     * Percentage of the population that will choose to have children.
     * The algorithm tries to create as many children as there are people
     * in the previous generation regardless of this factor. Each couple has the same number of children.
     */
    static double breederPercent = 0.6;

    /**
     * Percentage of the previous generation that dies off after the new generation is born.
     * Child mortality rate correlates to this to keep the population level stable.
     */
    static double mortalityRate = 0.6;

    /**
     * If true the best individual is always passed to the next generation
     */
    static boolean elitism = true;

    /**
     * If an individual has selection probability p then the next
     * best individual has selection probability selectionScalar * p
     */
    static double selectionScalar = 0.85;


    /**
     * Identifies the crossover operator to use on GeneticNeuralNetworks
     */
    static GeneticNeuralNetwork.CrossoverStyle crossoverStyle = GeneticNeuralNetwork.CrossoverStyle.PickWeight;

    /**
     * Identifies the mutation operator to use on GeneticNeuralNetworks
     */
    static GeneticNeuralNetwork.MutationStyle mutationStyle = GeneticNeuralNetwork.MutationStyle.Node;


    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
     *                                                                   *
     * Options specific to FCM Network Topology can be found below here  *
     *                                                                   *
     *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */

    /**
     * For fully connected multilayer networks
     * this represents the number of hidden layers
     */
    static int hiddenLayerCount = 1;

    /**
     * For fully connected multilayer networks
     * this is the hidden layer size
     */
    static int hiddenLayerSize = 20;

    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
     *                                                                       *
     * Options specific to Pyramid Network Topology can be found below here  *
     *                                                                       *
     *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */

    /**
     * For overlapping pyramid networks:
     * the sizes of the hidden layers ordered from input to output
     */
    static int[] layerSizes = new int[]{40, 10, 1};

    /**
     * For overlapping pyramid networks: the logical groupings of the nodes from layer to layer.
     * Overlap sizes will always be multiples of this logical grouping.
     * These are ordered as the layer sizes are.
     */
    static int[] logicalGroups = new int[]{25, 1, 1};

    /**
     * For overlapping pyramid networks: the number of hidden layers
     * to connect to the output neurons, counted from the output backwards.
     * These hidden layers are fully connected to the output nodes.
     */
    static int layersToOutput = 2;

    /**
     * Desired percentage of overlap with respect to the default non overlapping size.
     * Of course the actual percentage is forced by the logical grouping of the previous layer,
     * but it will try to remain as close as possible to the target while respecting the logical groups
     * and doing at least 1 logical group worth of overlap on either side.
     */
    static double pyramidPercentOverlap = 0.1;


    //----------------------- end of options -------------------------


    /**
     * Main routine: dispatches to one of twelve preconfigured
     * algorithm/trainer/topology combinations selected by the algorithm index.
     * <p/>
     * Information about the algorithm method names:
     * <p/>
     * NN is neural network algorithm<br/>
     * VNN is voting neural network algorithm<br/>
     * <p/>
     * GD means gradient descent training<br/>
     * GEN means genetic algorithm training<br/>
     * MIX means mixed training<br/>
     * <p/>
     * FCM means a fully connected multilayer Network (although the number of hidden layers is most often set to 1)<br/>
     * PY means sparsely connected pyramid network (see the OverlappingPyramidBuilder class for more details on topology)<br/>
     *
     * @param args ignored
     */
    public static void main(String[] args)
    {

        // NOTE(review): this preprocessing pass hard-codes its options instead of
        // using the configurable fields above (normalizationStyle, desiredSegments,
        // segmentMergeOverlap), and the processed data set is discarded — it looks
        // like leftover debugging code; confirm whether any of the run_* algorithms
        // depends on a side effect of it before removing.
        DataPreProcessor pp = new DataPreProcessor("trainx.txt", 500);
        pp.setMoments(1);
        pp.setAddFeatures(true);
        pp.setDataPointCount(1);
        pp.setNormalizationStyle(NormalizationStyle.UnitMagnitude);
        pp.setPercentOverlap(0.1);
        pp.processData();
        pp.getProcessedDataSet();


        // dispatch on the preconfigured combination index
        switch (algorithm)
        {
            case 0:
                run_NN_GD_FCM();
                break;
            case 1:
                run_NN_GEN_FCM();
                break;
            case 2:
                run_NN_MIX_FCM();
                break;
            case 3:
                run_NN_GD_PY();
                break;
            case 4:
                run_NN_GEN_PY();
                break;
            case 5:
                run_NN_MIX_PY();
                break;
            case 6:
                run_VNN_GD_FCM();
                break;
            case 7:
                run_VNN_GEN_FCM();
                break;
            case 8:
                run_VNN_MIX_FCM();
                break;
            case 9:
                run_VNN_GD_PY();
                break;
            case 10:
                run_VNN_GEN_PY();
                break;
            case 11:
                run_VNN_MIX_PY();
                break;
            default:
                // previously an out-of-range index silently did nothing
                System.err.println("Unknown algorithm index " + algorithm + " (expected 0-11)");
                break;
        }

    }


    /**
     * Returns the number of features fed to the network per segment.
     *
     * @return 25 (the full feature count, matching the length of the
     *         noPitch/noTimbre masks above) when no explicit selection is set,
     *         otherwise the number of enabled entries in selectedFeatures
     */
    private static int getDimensionality()
    {
        // null selection means "use every feature"
        if (selectedFeatures == null) return 25;

        // otherwise count the features that are switched on
        int finalDimension = 0;
        for (boolean included : selectedFeatures)
            if (included) finalDimension++;

        return finalDimension;
    }


    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
     *                                                               *
     * Initialization methods are below here                         *
     *                                                               *
     *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */


    /**
     * Applies the shared data selection and preprocessing options
     * to any NeuralNetworkAlgorithm.
     */
    private static void initializeNNAlgorithm(NeuralNetworkAlgorithm algo)
    {
        // data selection
        algo.setDesiredSongCount(totalSongs);
        algo.setTrainingPercentage(trainingPercentage);
        algo.setIncludedFeatures(selectedFeatures);
        // preprocessing
        algo.setDesiredSegmentCount(desiredSegments);
        algo.setPercentOverlap(segmentMergeOverlap);
        algo.setNormalizationStyle(normalizationStyle);
    }

    /**
     * Applies the voting-specific option plus everything shared with
     * the plain NN algorithm to a VotingNNAlgorithm.
     */
    private static void initializeVNNAlgorithm(VotingNNAlgorithm algo)
    {
        // voting specific setting
        algo.setFamilies(families);
        // settings shared with the plain NN algorithm
        initializeNNAlgorithm(algo);
    }

    /**
     * Applies the options shared by every trainer type: stopping criteria,
     * progress reporting, and random weight generation bounds.
     */
    private static void initializeTrainer(NNTrainer<? extends NeuralNetwork> trainer)
    {
        // stopping criteria and training volume
        trainer.setMaxIterations(maxIterations);
        trainer.setTargetAccuracy(targetAccuracy);
        trainer.setTrainingCount(trainingCount);
        // progress reporting
        trainer.setPrintTrainingProgress(printProgress);
        trainer.setPrintPeriod(printPeriod);
        // random weight generation bounds
        trainer.setMinRandomWeight(minRandomWeight);
        trainer.setMaxRandomWeight(maxRandomWeight);
    }

    /**
     * Configures a GradientDescentTrainer with the common trainer options
     * plus the gradient-descent-specific ones.
     */
    private static void initializeGDTrainer(GradientDescentTrainer trainer)
    {
        // options shared by all trainers
        initializeTrainer(trainer);

        // gradient descent specific options
        trainer.setUpdateMode(gdUpdateMode);
        trainer.setLearningRate(learningRate);
        trainer.setReduceLR(reduceLR);
        trainer.setLrDecrementPeriod(lrDecrementPeriod);
        trainer.setJumpStart(jumpStart);
        trainer.setJumpFactor(jumpFactor);
        trainer.setStopOnNoProgress(stopOnNoProgress);
        trainer.setNoProgressPeriod(progressCheckPeriod);
    }

    /**
     * Configures a GeneticTrainer with the common trainer options
     * plus the evolution-specific ones.
     */
    private static void initializeGenTrainer(GeneticTrainer trainer)
    {
        // options shared by all trainers
        initializeTrainer(trainer);

        // evolution specific options
        trainer.setPopulationSize(populationSize);
        trainer.setBreederPercent(breederPercent);
        trainer.setMortalityRate(mortalityRate);
        trainer.setElitism(elitism);
        trainer.setSelectionScalar(selectionScalar);
        trainer.setCrossoverStyle(crossoverStyle);
        trainer.setMutationStyle(mutationStyle);
    }

    /**
     * Configures a MixedTrainer with the common trainer options plus both
     * the gradient descent and the genetic option groups.
     */
    private static void initializeMixedTrainer(MixedTrainer trainer)
    {
        // options shared by all trainers
        initializeTrainer(trainer);

        // gradient descent option group
        trainer.setUpdateMode(gdUpdateMode);
        trainer.setLearningRate(learningRate);
        trainer.setReduceLR(reduceLR);
        trainer.setLrDecrementPeriod(lrDecrementPeriod);
        trainer.setJumpStart(jumpStart);
        trainer.setJumpFactor(jumpFactor);
        trainer.setStopOnNoProgress(stopOnNoProgress);
        trainer.setNoProgressPeriod(progressCheckPeriod);

        // genetic option group
        // NOTE(review): unlike initializeGenTrainer, mortalityRate is never
        // set here — confirm whether MixedTrainer should receive it as well.
        trainer.setPopulationSize(populationSize);
        trainer.setBreederPercent(breederPercent);
        trainer.setElitism(elitism);
        trainer.setSelectionScalar(selectionScalar);
        trainer.setCrossoverStyle(crossoverStyle);
        trainer.setMutationStyle(mutationStyle);
    }


    /**
     * Applies the topology options shared by every network builder:
     * input size (segments times features per segment) and output size.
     */
    private static void initializeBuilder(NNBuilder builder)
    {
        builder.setInputCount(desiredSegments * getDimensionality());
        // 5 output neurons — presumably one per class; confirm against the data set
        builder.setOutputCount(5);
    }


    /**
     * Configures a fully connected multilayer builder with the common
     * topology options plus its hidden layer dimensions.
     */
    private static void initializeFCMBuilder(ConnectedMultilayerBuilder builder)
    {
        // options common to all topologies
        initializeBuilder(builder);
        // hidden layer dimensions
        builder.setHiddenLayerSize(hiddenLayerSize);
        builder.setHiddenLayers(hiddenLayerCount);
    }

    /**
     * Configures an overlapping pyramid builder with the common topology
     * options plus the pyramid-specific layer layout.
     */
    private static void initializePYBuilder(OverlappingPyramidBuilder builder)
    {
        // options common to all topologies
        initializeBuilder(builder);
        // pyramid layer layout
        builder.setLayerSizes(layerSizes);
        builder.setLogicalGroups(logicalGroups);
        builder.setLayersToOutput(layersToOutput);
        builder.setPercentOverlap(pyramidPercentOverlap);
    }


    /*   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *
     *                                                               *
     * Methods for running various algorithm combinations are below  *
     *                                                               *
     *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   *   */


    /**
     * Runs the plain neural network algorithm using gradient descent
     * training on a fully connected multilayer topology.
     */
    private static void run_NN_GD_FCM()
    {
        // network topology
        ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork> netBuilder =
                new ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializeFCMBuilder(netBuilder);

        // training strategy
        GradientDescentTrainer gdTrainer = new GradientDescentTrainer(netBuilder);
        initializeGDTrainer(gdTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(gdTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }

    /**
     * Runs the voting neural network algorithm using gradient descent
     * training on a fully connected multilayer topology.
     */
    private static void run_VNN_GD_FCM()
    {
        // network topology
        ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork> netBuilder =
                new ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializeFCMBuilder(netBuilder);

        // training strategy
        GradientDescentTrainer gdTrainer = new GradientDescentTrainer(netBuilder);
        initializeGDTrainer(gdTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(gdTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }


    /**
     * Runs the plain neural network algorithm using genetic algorithm
     * training on a fully connected multilayer topology.
     */
    private static void run_NN_GEN_FCM()
    {
        // network topology
        ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork> netBuilder =
                new ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializeFCMBuilder(netBuilder);

        // training strategy
        GeneticTrainer genTrainer = new GeneticTrainer(netBuilder);
        initializeGenTrainer(genTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(genTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }

    /**
     * Runs the plain neural network algorithm using genetic algorithm
     * training on an overlapping pyramid topology.
     * (The previous javadoc incorrectly said "fully connected multilayer".)
     */
    private static void run_NN_GEN_PY()
    {
        // network topology
        OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork> netBuilder =
                new OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializePYBuilder(netBuilder);

        // training strategy
        GeneticTrainer genTrainer = new GeneticTrainer(netBuilder);
        initializeGenTrainer(genTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(genTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }


    /**
     * Runs the plain neural network algorithm using mixed (genetic +
     * gradient descent) training on fully connected multilayer topologies.
     * (The previous javadoc incorrectly said "genetic algorithm training".)
     */
    private static void run_NN_MIX_FCM()
    {
        // topology for the genetic phase
        ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork> genBuilder =
                new ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializeFCMBuilder(genBuilder);

        // topology for the gradient descent phase
        ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork> gdBuilder =
                new ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializeFCMBuilder(gdBuilder);

        // trainer combining both phases
        MixedTrainer mixedTrainer = new MixedTrainer(genBuilder, gdBuilder);
        initializeMixedTrainer(mixedTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(mixedTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }

    /**
     * Runs the plain neural network algorithm using mixed (genetic +
     * gradient descent) training on overlapping pyramid topologies.
     */
    private static void run_NN_MIX_PY()
    {
        // topology for the genetic phase
        OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork> genBuilder =
                new OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializePYBuilder(genBuilder);

        // topology for the gradient descent phase
        OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork> gdBuilder =
                new OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializePYBuilder(gdBuilder);

        // trainer combining both phases
        MixedTrainer mixedTrainer = new MixedTrainer(genBuilder, gdBuilder);
        initializeMixedTrainer(mixedTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(mixedTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }

    /**
     * Runs the voting neural network algorithm using genetic algorithm
     * training on a fully connected multilayer topology.
     */
    private static void run_VNN_GEN_FCM()
    {
        // network topology
        ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork> netBuilder =
                new ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializeFCMBuilder(netBuilder);

        // training strategy
        GeneticTrainer genTrainer = new GeneticTrainer(netBuilder);
        initializeGenTrainer(genTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(genTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }


    /**
     * Runs the voting neural network algorithm using mixed (genetic +
     * gradient descent) training on fully connected multilayer topologies.
     */
    private static void run_VNN_MIX_FCM()
    {
        // topology for the genetic phase
        ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork> genBuilder =
                new ConnectedMultilayerBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializeFCMBuilder(genBuilder);

        // topology for the gradient descent phase
        ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork> gdBuilder =
                new ConnectedMultilayerBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializeFCMBuilder(gdBuilder);

        // trainer combining both phases
        MixedTrainer mixedTrainer = new MixedTrainer(genBuilder, gdBuilder);
        initializeMixedTrainer(mixedTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(mixedTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }


    /**
     * Runs the voting neural network algorithm using gradient descent
     * training on an overlapping pyramid topology.
     */
    private static void run_VNN_GD_PY()
    {
        // network topology
        OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork> netBuilder =
                new OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializePYBuilder(netBuilder);

        // training strategy
        GradientDescentTrainer gdTrainer = new GradientDescentTrainer(netBuilder);
        initializeGDTrainer(gdTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(gdTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }


    /**
     * Runs the voting neural network algorithm using genetic algorithm
     * training on an overlapping pyramid topology.
     */
    private static void run_VNN_GEN_PY()
    {
        // network topology
        OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork> netBuilder =
                new OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializePYBuilder(netBuilder);

        // training strategy
        GeneticTrainer genTrainer = new GeneticTrainer(netBuilder);
        initializeGenTrainer(genTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(genTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }

    /**
     * Runs the voting neural network algorithm using mixed (genetic +
     * gradient descent) training on overlapping pyramid topologies.
     */
    private static void run_VNN_MIX_PY()
    {
        // topology for the genetic phase
        OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork> genBuilder =
                new OverlappingPyramidBuilder<Neuron, GeneticNeuralNetwork>(Neuron.class, GeneticNeuralNetwork.class);
        initializePYBuilder(genBuilder);

        // topology for the gradient descent phase
        OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork> gdBuilder =
                new OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializePYBuilder(gdBuilder);

        // trainer combining both phases
        MixedTrainer mixedTrainer = new MixedTrainer(genBuilder, gdBuilder);
        initializeMixedTrainer(mixedTrainer);

        // top level algorithm
        VotingNNAlgorithm voting = new VotingNNAlgorithm(mixedTrainer);
        initializeVNNAlgorithm(voting);

        voting.run();
    }


    /**
     * Runs the plain neural network algorithm using gradient descent
     * training on an overlapping pyramid topology.
     */
    private static void run_NN_GD_PY()
    {
        // network topology
        OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork> netBuilder =
                new OverlappingPyramidBuilder<GDNeuron, GDNeuralNetwork>(GDNeuron.class, GDNeuralNetwork.class);
        initializePYBuilder(netBuilder);

        // training strategy
        GradientDescentTrainer gdTrainer = new GradientDescentTrainer(netBuilder);
        initializeGDTrainer(gdTrainer);

        // top level algorithm
        NeuralNetworkAlgorithm nn = new NeuralNetworkAlgorithm(gdTrainer);
        initializeNNAlgorithm(nn);

        nn.run();
    }

    /**
     * Debug helper: builds the small hand-checked network from one of the
     * assignments and runs gradient descent on a single input/output pair
     * to verify the gradient descent implementation.
     */
    private static void debug()
    {
        DebugNetworkBuilder netBuilder = new DebugNetworkBuilder();

        // single hand-verified training example
        ClassificationDataSet dataSet = new ClassificationDataSet();
        dataSet.addInputOutputPair(new double[]{0, 1}, new double[]{1, 0, 0});

        GradientDescentTrainer gdTrainer = new GradientDescentTrainer(netBuilder);
        gdTrainer.setTrainingSet(dataSet);
        gdTrainer.setLearningRate(0.4);

        gdTrainer.trainNetworks();
    }
}
