package ai.neuralnet.gradientDescent;

import ai.neuralnet.NNBuilder;
import ai.neuralnet.NeuralNetwork;

public class GDNeuralNetwork extends NeuralNetwork<GDNeuron>
{

    // Last rate passed to setLearningRate(). NOTE(review): the neurons are
    // created by the superclass with their own rate; this field is only
    // pushed to them when setLearningRate() is called, so it may disagree
    // with the neurons' actual rate until then — confirm intent.
    private double learningRate = 0.002;


    /**
     * Builds a gradient-descent trainable network from the given builder.
     *
     * @param builder network configuration passed through to the superclass
     */
    public GDNeuralNetwork(NNBuilder builder)
    {
        super(builder);
    }

    /**
     * Sets the learning rate on this network and pushes it to every hidden
     * and output neuron.
     *
     * @param rate the new learning rate
     */
    public void setLearningRate(double rate)
    {
        learningRate = rate;
        for (GDNeuron n : hiddenNeurons)
        {
            n.setLearningRate(rate);
        }
        for (GDNeuron n : outputNeurons)
        {
            n.setLearningRate(rate);
        }
    }

    /**
     * Folds the current sigmas into the accumulated delta weights and clears
     * the sigma flags on every hidden and output neuron.
     *
     * <p>NOTE(review): despite its name, this method does NOT call
     * {@code GDNeuron.resetDeltaWeights()} — it performs the same
     * accumulate-and-clear step as the tail of
     * {@link #singleBackPropagation(double[])}. Behavior is kept as-is for
     * existing callers; confirm whether the name or the body is intended.
     */
    public void resetDeltaWeights()
    {
        accumulateDeltasAndClearSigmas();
    }

    /**
     * Performs one backward pass for a single training example and
     * accumulates the resulting weight corrections, to be applied later in
     * a batch by {@link #updateWeightsBatch()}.
     *
     * @param desiredOutputs desired values for the current state of the
     *                       network; must have one entry per output neuron
     */
    public void singleBackPropagation(double[] desiredOutputs)
    {
        backPropagate(desiredOutputs);

        // Accumulate this example's correction and clear the sigma flags so
        // the next backward pass starts clean.
        accumulateDeltasAndClearSigmas();
    }

    /**
     * Applies the delta weights accumulated over a batch of
     * {@link #singleBackPropagation(double[])} calls, then resets the deltas
     * for the next batch.
     */
    public void updateWeightsBatch()
    {
        for (GDNeuron n : hiddenNeurons)
        {
            n.updateWeights();
            n.resetDeltaWeights();
        }
        for (GDNeuron n : outputNeurons)
        {
            n.updateWeights();
            n.resetDeltaWeights();
        }
    }

    /**
     * Performs one backward pass and immediately applies the resulting
     * weight correction (online / per-example updating).
     *
     * @param desiredOutputs desired values for the current state of the
     *                       network; must have one entry per output neuron
     */
    public void updateWeightsOnline(double[] desiredOutputs)
    {
        backPropagate(desiredOutputs);

        // Accumulate, apply, and clear per neuron. The four calls are kept
        // together per neuron to preserve the original statement ordering.
        for (GDNeuron n : hiddenNeurons)
        {
            n.updateDeltaWeights();
            n.updateWeights();
            n.resetDeltaWeights();
            n.resetSigmaSet();
        }
        for (GDNeuron n : outputNeurons)
        {
            n.updateDeltaWeights();
            n.updateWeights();
            n.resetDeltaWeights();
            n.resetSigmaSet();
        }
    }

    /**
     * Copies this network and re-pushes the stored learning rate so the
     * copy's neurons use the same rate as this network's field.
     */
    @Override
    public NeuralNetwork<GDNeuron> clone()
    {
        GDNeuralNetwork copy = (GDNeuralNetwork) super.clone();
        copy.setLearningRate(learningRate);
        return copy;
    }

    /**
     * Computes the error sigma on the output layer and propagates it
     * backward until every hidden neuron has its sigma set.
     *
     * <p>NOTE(review): the loop assumes {@code tryPropagateBackward()}
     * eventually sets every hidden neuron's sigma; otherwise it spins
     * forever — confirm against {@code GDNeuron}.
     *
     * @param desiredOutputs desired values, one per output neuron
     */
    private void backPropagate(double[] desiredOutputs)
    {
        // Compute sigma for output neurons.
        for (int i = 0; i < outputNeurons.length; i++)
        {
            outputNeurons[i].computeOutputSigma(desiredOutputs[i]);
        }

        // Propagate sigma all the way back through the hidden layer.
        while (!isHiddenLayerSigmaSet())
        {
            for (GDNeuron n : hiddenNeurons)
            {
                n.tryPropagateBackward();
            }
        }
    }

    /**
     * Accumulates each neuron's delta weights from its current sigma and
     * clears the sigma flag for the next backward pass.
     */
    private void accumulateDeltasAndClearSigmas()
    {
        for (GDNeuron n : hiddenNeurons)
        {
            n.updateDeltaWeights();
            n.resetSigmaSet();
        }
        for (GDNeuron n : outputNeurons)
        {
            n.updateDeltaWeights();
            n.resetSigmaSet();
        }
    }

    /**
     * Reports whether every hidden neuron has its sigma set.
     *
     * @return {@code true} iff all hidden neurons report a set sigma
     */
    private boolean isHiddenLayerSigmaSet()
    {
        for (GDNeuron neuron : hiddenNeurons)
        {
            if (!neuron.isSigmaSet())
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns the learning rate currently in effect on the neurons.
     *
     * <p>NOTE(review): this reads the first output neuron rather than the
     * {@link #learningRate} field, so the two can disagree until
     * {@link #setLearningRate(double)} is called; it also fails on a network
     * with no output neurons — confirm which source is authoritative.
     *
     * @return the first output neuron's learning rate
     */
    public double getLearningRate()
    {
        return outputNeurons[0].getLearningRate();
    }


}
