package pl.edu.agh.cp.learning;

import pl.edu.agh.neural.core.ConnectedNeuron;
import pl.edu.agh.neural.simple.InputConnection;
import pl.edu.agh.neural.simple.activation.IActivationFunction;
import pl.edu.agh.som.Node;
import pl.edu.agh.som.learning.LinearLearning;

/**
 * Base implementation of a supervised (teacher-driven) learning function
 * that adjusts the weights of connections originating from the winning
 * Kohonen node, using a linearly decaying learning rate.
 */
public abstract class BaseTeacherLearningFunction implements ITeacherLearningFunction {

    /** Initial learning rate fed into the linear decay schedule. */
    private static final double INITIAL_LEARNING_RATE = 0.06;

    protected IActivationFunction activationFunction;
    protected int learningSteps;

    /**
     * @param activationFunction activation function exposed via {@link #getActivationFunction()}
     * @param learningSteps      total number of learning steps; bounds the linear decay
     *                           of the learning rate
     */
    protected BaseTeacherLearningFunction(IActivationFunction activationFunction, int learningSteps)
    {
        this.activationFunction = activationFunction;
        this.learningSteps = learningSteps;
    }

    /**
     * Applies one delta-rule weight update to every input connection of the
     * given layer that originates from the Kohonen winner node; all other
     * connections are left untouched.
     *
     * @param learningStep        current step index, used to derive the learning rate
     * @param currentLayerNeurons neurons whose incoming weights may be adjusted
     * @param teachingValues      target output per neuron, indexed in the same
     *                            order as {@code currentLayerNeurons}
     * @param kohonenWinner       winning SOM node; only connections from it are taught
     */
    @Override
    public void teach(int learningStep, ConnectedNeuron[] currentLayerNeurons, double[] teachingValues, Node kohonenWinner)
    {
        // The learning speed depends only on the step, so compute it once per
        // call instead of once per connection — the original allocated a fresh
        // LinearLearning in the innermost loop.
        double h = getLearningSpeed(learningStep);

        int i = 0;
        for (ConnectedNeuron neuron : currentLayerNeurons)
        {
            double y = neuron.evaluate();
            double dy = neuron.getDerivative();
            for (InputConnection connection : neuron.getInputConnections())
            {
                // Only weights on connections from the winning node are adjusted.
                if (connection.getNeuron() != kohonenWinner)
                {
                    continue;
                }
                double v = connection.getWeight();
                double z = teachingValues[i];
                double k = connection.getNeuron().getValue();

                // Delta rule: w' = w + eta * (target - output) * f'(net) * input
                connection.setWeight(v + h * (z - y) * dy * k);
            }
            i++;
        }
    }

    @Override
    public IActivationFunction getActivationFunction() {
        return activationFunction;
    }

    /**
     * Learning rate for the given step: linear decay from
     * {@link #INITIAL_LEARNING_RATE} over {@code learningSteps} steps.
     */
    private double getLearningSpeed(int learningStep)
    {
        // NOTE(review): assumes LinearLearning.evaluate is a pure function of
        // its constructor arguments and the step, so evaluating once per
        // teach() call is equivalent to the original per-connection
        // evaluation — confirm against LinearLearning's implementation.
        return new LinearLearning(INITIAL_LEARNING_RATE, learningSteps).evaluate(learningStep);
    }

}
