package backpropagationv1;

/**
 * Online backpropagation trainer for a minimal 2-layer network:
 * a hidden layer of {@code Neurona} units feeding one output neuron.
 *
 * NOTE(review): the code below (like the original) is hardwired to ONE
 * hidden neuron (only NN[0] / O[0] / NNS.w[0] are used); the input
 * dimension follows the hidden neuron's weight count.
 */
public class BP {
    /*CONSTRUCTORES*/

    /**
     * @param _NN  hidden-layer neurons
     * @param _NNS output neuron; its weights connect the hidden layer to the output
     */
    public BP(Neurona[] _NN, Neurona _NNS){
        this.NN = _NN;
        this.NNS = _NNS;
    }

    /*METODOS*/

    /**
     * Performs one online training step: forward pass, backward pass
     * (delta computation per layer) and weight update.
     *
     * @param in  input vector for this sample
     * @param out desired (target) output for this sample
     */
    public void trainer(float[] in, float out){
        this.T = out;
        this.I = in;
        this.O = new float[NN.length];

        // Forward pass: hidden-layer activations...
        for(int i=0; i<NN.length; i++) this.O[i] = NN[i].f(in);

        // ...then the actual network output. BUGFIX: the original never
        // evaluated the output neuron, so the error term was measured
        // against a hidden activation instead of the network output.
        this.S = new float[]{ NNS.f(this.O) };

        // Backward pass: from the output layer (lmax) down to the hidden layer.
        // BUGFIX: loop start now derives from lmax instead of a hard-coded 1.
        for(int layer = lmax; layer >= 0; layer--) delta(layer);

        updateW();
    }

    /**
     * Computes the error term (delta) and the weight increments for one layer.
     * Assumes sigmoid activations (derivative f*(1-f)), matching the original.
     *
     * @param layer {@code lmax} for the output layer, smaller values for hidden layers
     */
    private void delta(int layer){
        if(layer == lmax){
            // Output delta: d = (T - S) * S * (1 - S), with S the network output.
            // BUGFIX: original used the hidden activation O[0] in place of S.
            d_last[0]  = (T - S[0]) * S[0] * (1 - S[0]);
            // Delta rule: increment = mu * delta * (input to this weight),
            // and the output neuron's input is the hidden activation O[0].
            DW_last[0] = mu * d_last[0] * O[0];
        }
        else { // hidden layer (layer < lmax); original's `layer <= lmax` was always true here
            // Hidden delta: error back-propagated through the OUTPUT neuron's
            // weight, scaled by the derivative of the HIDDEN activation.
            // BUGFIX: original used the hidden neuron's own weights NN[0].w[i]
            // and the raw-input derivative I[i]*(1-I[i]).
            float dHidden = d_last[0] * NNS.w[0] * O[0] * (1 - O[0]);
            for(int i=0; i<d_new.length; i++){
                d_new[i] = dHidden;
                // BUGFIX: each weight's increment scales by its own input I[i];
                // the original multiplied every increment by O[0].
                DW_new[i] = mu * dHidden * I[i];
            }
        }
    }

    /** Applies the accumulated weight increments to both layers. */
    public void updateW(){
        NNS.w[0] = NNS.w[0] + DW_last[0];
        // Bound follows the increment array instead of a hard-coded 3.
        for(int i=0; i<DW_new.length; i++) NN[0].w[i] = NN[0].w[i] + DW_new[i];
    }

    /*VARIABLES GLOBALES DE LA CLASE*/
    private float T;                          // target output for the current sample
    private float d_last[]= new float[1];     // output-layer delta
    private float d_new[] = new float[3];     // hidden-layer deltas (one per hidden weight)
    private float I[];                        // current input vector
    private float O[];                        // hidden-layer activations
    private float S[];                        // network output (written each forward pass)
    private float DW_last[] = new float[1];   // pending output-weight increment
    private float DW_new[]  = new float[3];   // pending hidden-weight increments
    private int lmax = 1;                     // index of the output layer
    private Neurona[] NN;                     // hidden-layer neurons
    private Neurona NNS;                      // output neuron
    private float mu = 0.01f;                 // learning rate (0.01f over (float) cast)
}