package ai.neuralnet.gradientDescent;

import ai.neuralnet.NNBuilder;
import ai.neuralnet.NNTrainer;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Uses gradient descent to train networks with random weights till target accuracy
 * then uses validation data set to pick network that generalizes best.
 */
public class GradientDescentTrainer extends NNTrainer<GDNeuralNetwork>
{

    /** Step size applied to every network passed through {@link #initNetwork}. */
    private double learningRate = 0.002;

    /** When true, training aborts early if accuracy stalls (see {@link #noProgressPeriod}). */
    private boolean stopOnNoProgress = false;

    /** Number of iterations between stall checks. */
    private int noProgressPeriod = 100;

    /** Weight-update strategy: per-sample (Online) or accumulated per pass (Batch). */
    private GDMode updateMode = GDMode.Batch;

    /** When true, the learning rate is decayed periodically during training. */
    private boolean reduceLR = false;

    /** When true, over-generate random networks and keep only the best-scoring starters. */
    private boolean jumpStart = true;

    /** Candidate networks generated per kept network when jump-starting. */
    private int jumpFactor = 10;

    /** Initial iteration interval between learning-rate decays; the interval grows over time. */
    private int lrDecrementPeriod = 300;

    public GradientDescentTrainer(NNBuilder<GDNeuron, GDNeuralNetwork> networkBuilder)
    {
        super(networkBuilder);
    }

    /**
     * Prepares a network for training by applying this trainer's configured learning rate.
     *
     * @param network the network to initialise
     */
    public void initNetwork(GDNeuralNetwork network)
    {
        network.setLearningRate(learningRate);
    }

    @Override
    public void trainNetworks()
    {
        networks.clear();

        if (jumpStart)
        {
            // Over-generate candidates, score each once on the training set, and
            // keep only the best starters.
            List<GDNeuralNetwork> pool = createRandomNetworks(trainingCount * jumpFactor);
            for (GDNeuralNetwork network : pool)
            {
                network.setAccScore(trainingSet.getAccuracy(network)[0]);
            }
            // NOTE(review): assumes GDNeuralNetwork.compareTo() ranks best-first
            // by accuracy score — confirm against its implementation.
            Collections.sort(pool);

            // Guard against a pool smaller than trainingCount (e.g. jumpFactor == 0),
            // which previously threw IndexOutOfBoundsException.
            int keep = Math.min(trainingCount, pool.size());
            networks = new ArrayList<GDNeuralNetwork>(pool.subList(0, keep));
        }
        else
        {
            networks = createRandomNetworks(trainingCount);
        }

        int i = 1;
        for (GDNeuralNetwork n : networks)
        {
            double[] info = train(n);
            System.out.println("Network " + (i++) + " trained, " + info[2] + " iterations. Final " + statsString(info));
        }
    }

    /**
     * Trains a single network with gradient descent until it reaches the target
     * accuracy, exhausts the iteration budget, or (optionally) stalls.
     *
     * @param network the network to train; its learning rate is (re)initialised here
     * @return a 3-element array: {best-guess accuracy, confident accuracy, iterations run}
     */
    public double[] train(GDNeuralNetwork network)
    {
        initNetwork(network);
        int drate = lrDecrementPeriod;
        double[] accuracy = trainingSet.getAccuracy(network);

        double veryOldAccuracy = accuracy[0];
        int iteration = 0;
        long startTime = System.currentTimeMillis();
        while (accuracy[0] < targetAccuracy && iteration < maxIterations)
        {
            iteration++;
            int bgCorrect = 0, cCorrect = 0;
            for (int i = 0; i < trainingSet.size(); i++)
            {

                network.propagateInputs(trainingSet.getInputValues().get(i));
                // Track accuracy as we go, since we are iterating over the set anyway.
                // cat[0] is the best-guess category; cat[1] a confident guess or -1.
                int cat[] = trainingSet.predictedCategory(network, trainingSet.getInputValues().get(i));
                if (trainingSet.getOutputValues().get(i)[cat[0]] == 1) bgCorrect++;
                if (cat[1] != -1 && trainingSet.getOutputValues().get(i)[cat[1]] == 1) cCorrect++;

                switch (updateMode)
                {
                    case Online:
                    {
                        // Apply the gradient immediately after each sample.
                        network.updateWeightsOnline(trainingSet.getOutputValues().get(i));
                        break;
                    }
                    case Batch:
                    {
                        // Accumulate gradients only; weights are applied after the full pass.
                        network.singleBackPropagation(trainingSet.getOutputValues().get(i));
                        break;
                    }
                }
            }

            accuracy = new double[]{bgCorrect / (double) trainingSet.size(), cCorrect / (double) trainingSet.size()};

            if (updateMode == GDMode.Batch)
            {
                // Apply the gradients accumulated over the whole pass in one step.
                network.updateWeightsBatch();
            }

            network.setAccScore(accuracy[0]);

            if (reduceLR && iteration % drate == 0)
            {
                // Decay the learning rate by 10% and stretch the interval until the
                // next decay. Math.max guarantees the interval actually grows: the
                // old "drate *= 1.1" truncated back to the same value for drate < 10,
                // causing decays to fire at a fixed (never-lengthening) period.
                double lr = network.getLearningRate();
                network.setLearningRate(lr * 0.9);
                drate = Math.max(drate + 1, (int) (drate * 1.1));
            }

            if (iteration % noProgressPeriod == 0)
            {
                // Stall detection: stop if accuracy barely moved over the window.
                if (stopOnNoProgress && accuracy[0] - veryOldAccuracy < 0.001)
                {
                    break;
                }

                // accuracy[0] was just stored via setAccScore, so read it directly.
                veryOldAccuracy = accuracy[0];
            }

            if (printTrainingProgress && iteration % printFrequency == 0)
                System.out.println("time: " + (System.currentTimeMillis() - startTime) + " iteration: " + iteration + " " + statsString(accuracy));
        }

        return new double[]{accuracy[0], accuracy[1], iteration};
    }

    /** Enables/disables periodic learning-rate decay during training. */
    public void setReduceLR(boolean reduceLR)
    {
        this.reduceLR = reduceLR;
    }

    /** Selects Online (per-sample) or Batch (per-pass) weight updates. */
    public void setUpdateMode(GDMode updateMode)
    {
        this.updateMode = updateMode;
    }

    /** Sets the learning rate applied to networks when training starts. */
    public void setLearningRate(double learningRate)
    {
        this.learningRate = learningRate;
    }

    /** Sets the initial interval (in iterations) between learning-rate decays. */
    public void setLrDecrementPeriod(int lrDecrementPeriod)
    {
        this.lrDecrementPeriod = lrDecrementPeriod;
    }

    /** Sets how many candidates are generated per kept network when jump-starting. */
    public void setJumpFactor(int jumpFactor)
    {
        this.jumpFactor = jumpFactor;
    }

    /** Enables/disables the jump-start candidate pre-selection. */
    public void setJumpStart(boolean jumpStart)
    {
        this.jumpStart = jumpStart;
    }

    /** Sets the stall-check interval in iterations. */
    public void setNoProgressPeriod(int noProgressPeriod)
    {
        this.noProgressPeriod = noProgressPeriod;
    }

    /** Enables/disables early abort when accuracy stops improving. */
    public void setStopOnNoProgress(boolean stopOnNoProgress)
    {
        this.stopOnNoProgress = stopOnNoProgress;
    }

    public double getLearningRate()
    {
        return learningRate;
    }
}
