package ai.neuralnet.mixed;

import ai.genetic.Individual;
import ai.neuralnet.ClassificationDataSet;
import ai.neuralnet.NNBuilder;
import ai.neuralnet.Neuron;
import ai.neuralnet.gen.GeneticNeuralNetwork;
import ai.neuralnet.gen.GeneticTrainer;
import ai.neuralnet.gradientDescent.GDMode;
import ai.neuralnet.gradientDescent.GDNeuralNetwork;
import ai.neuralnet.gradientDescent.GDNeuron;
import ai.neuralnet.gradientDescent.GradientDescentTrainer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Uses both the genetic algorithm and gradient descent in hopes of combining
 * the speed of gradient descent with the flexibility of the genetic algorithm.
 *
 * <p>Training runs in two phases: a short genetic-evolution pass over a large
 * random pool, followed by gradient-descent refinement of the best survivors.
 * The GD-specific setters on this class forward to the internal
 * {@link GradientDescentTrainer}; the overridden setters keep both trainers
 * in sync.</p>
 */
public class MixedTrainer extends GeneticTrainer
{
    /** Runs the gradient-descent refinement phase (step 2). */
    private final GradientDescentTrainer gdTrainer;

    /** Builds GD-flavoured networks when converting GA networks for gradient descent. */
    private final NNBuilder<GDNeuron, GDNeuralNetwork> gdbuilder;


    /**
     * @param genbuilder builder for genetic networks, passed to the GA super-trainer
     * @param gdbuilder  builder for gradient-descent networks, used by the internal GD trainer
     */
    public MixedTrainer(NNBuilder<Neuron, GeneticNeuralNetwork> genbuilder, NNBuilder<GDNeuron, GDNeuralNetwork> gdbuilder)
    {
        super(genbuilder);
        this.gdbuilder = gdbuilder;
        gdTrainer = new GradientDescentTrainer(gdbuilder);
    }

    /**
     * Trains {@code trainingCount} networks and stores them in {@code networks}.
     *
     * <p>Step 1 evolves a random pool with the genetic algorithm at a
     * deliberately low (0.5) target accuracy; step 2 refines the best
     * candidates with gradient descent at the caller's original target
     * accuracy, then converts them back to the genetic representation.</p>
     */
    @Override
    public void trainNetworks()
    {
        // The GA phase temporarily lowers the inherited targetAccuracy field;
        // remember the caller's value so it can be restored afterwards.
        double targetAcc = targetAccuracy;
        networks.clear();

        try
        {
            /*
            * Step 1: create a large pool of random networks
            * these will be trained for a short amount of time
            * on the genetic trainer
            * */

            System.out.println("Step 1: create initial pool with GA");
            evolver.setMaxGenerations(maxIterations / 2);
            targetAccuracy = 0.5; // low bar: step 1 only needs rough candidates

            evolver.setPrintProgress(printTrainingProgress);

            int populationCount = Math.max(populationSize, trainingCount * 2);

            Collection<Individual<GeneticNeuralNetwork>> population = new ArrayList<Individual<GeneticNeuralNetwork>>(createRandomNetworks(populationCount));

            for (Individual<GeneticNeuralNetwork> p : population)
            {
                initNetwork(p.getObject());
            }

            evolver.setPopulation(population);

            // NOTE(review): the loop below reads evolved.get(0 .. trainingCount-1);
            // this assumes evolveGroup(trainingCount - 1) yields at least
            // trainingCount networks — confirm against GeneticTrainer/evolver.
            List<GeneticNeuralNetwork> evolved = evolver.evolveGroup(trainingCount - 1);

            Collections.sort(evolved); // put them best first

            /*
            * Step 2: the best networks from the first evolution phase
            * are trained further using gradient descent for a moderate length of time
            *
            * */

            System.out.println("Step 2: use GD on best members of Pool");

            gdTrainer.setMaxIterations(maxIterations);
            gdTrainer.setStopOnNoProgress(true);
            gdTrainer.setNoProgressPeriod(Math.max(150, maxIterations / 6));
            gdTrainer.setTargetAccuracy(targetAcc); // caller's accuracy, not the step-1 value

            for (int i = 0; i < trainingCount; i++)
            {
                GDNeuralNetwork gdnet = (GDNeuralNetwork) evolved.get(i).transform(gdbuilder);

                // info[2] is presumably the iteration count, per the message below
                // — verify against GradientDescentTrainer.train.
                double[] info = gdTrainer.train(gdnet);

                System.out.println("Network " + (i + 1) + " trained, " + info[2] + " iterations.  GD " + statsString(info));

                // Convert back to the genetic representation so the result pool is uniform.
                GeneticNeuralNetwork geneticCopy = (GeneticNeuralNetwork) gdnet.transform(networkBuilder);
                initNetwork(geneticCopy);
                networks.add(geneticCopy);
            }
        }
        finally
        {
            // Fix: restore the caller's target accuracy. Previously the field was
            // left at 0.5 after training, silently changing later runs.
            targetAccuracy = targetAcc;
        }
    }


    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setReduceLR}. */
    public void setReduceLR(boolean reduceLR)
    {
        gdTrainer.setReduceLR(reduceLR);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setUpdateMode}. */
    public void setUpdateMode(GDMode updateMode)
    {
        gdTrainer.setUpdateMode(updateMode);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setLearningRate}. */
    public void setLearningRate(double learningRate)
    {
        gdTrainer.setLearningRate(learningRate);
    }

    /** Sets the validation set on both the GA and GD trainers. */
    @Override
    public void setValidationSet(ClassificationDataSet validationSet)
    {
        super.setValidationSet(validationSet);
        gdTrainer.setValidationSet(validationSet);
    }

    /** Sets the training set on both the GA and GD trainers. */
    @Override
    public void setTrainingSet(ClassificationDataSet trainingSet)
    {
        super.setTrainingSet(trainingSet);
        gdTrainer.setTrainingSet(trainingSet);
    }

    /** Sets the target accuracy on both the GA and GD trainers. */
    @Override
    public void setTargetAccuracy(double targetAccuracy)
    {
        super.setTargetAccuracy(targetAccuracy);
        gdTrainer.setTargetAccuracy(targetAccuracy);
    }

    /** Sets progress printing on both the GA and GD trainers. */
    @Override
    public void setPrintTrainingProgress(boolean printTrainingProgress)
    {
        super.setPrintTrainingProgress(printTrainingProgress);
        gdTrainer.setPrintTrainingProgress(printTrainingProgress);
    }

    /** Sets the minimum random weight on both the GA and GD trainers. */
    @Override
    public void setMinRandomWeight(double minRandomWeight)
    {
        super.setMinRandomWeight(minRandomWeight);
        gdTrainer.setMinRandomWeight(minRandomWeight);
    }

    /** Sets the maximum random weight on both the GA and GD trainers. */
    @Override
    public void setMaxRandomWeight(double maxRandomWeight)
    {
        super.setMaxRandomWeight(maxRandomWeight);
        gdTrainer.setMaxRandomWeight(maxRandomWeight);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setJumpFactor}. */
    public void setJumpFactor(int jumpFactor)
    {
        gdTrainer.setJumpFactor(jumpFactor);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setJumpStart}. */
    public void setJumpStart(boolean jumpStart)
    {
        gdTrainer.setJumpStart(jumpStart);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setLrDecrementPeriod}. */
    public void setLrDecrementPeriod(int lrDecrementPeriod)
    {
        gdTrainer.setLrDecrementPeriod(lrDecrementPeriod);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setStopOnNoProgress}. */
    public void setStopOnNoProgress(boolean stopOnNoProgress)
    {
        gdTrainer.setStopOnNoProgress(stopOnNoProgress);
    }

    /** Forwards to the internal GD trainer; see {@link GradientDescentTrainer#setNoProgressPeriod}. */
    public void setNoProgressPeriod(int noProgressPeriod)
    {
        gdTrainer.setNoProgressPeriod(noProgressPeriod);
    }
}
