function [ net ] = backPropagation( net, input, Vs, activationFn, activationDerivate, BETA)
%BACKPROPAGATION One backward pass; returns net with every layer's weights updated.
%   net                - cell array of weight matrices, net{m} maps layer m's
%                        activations to layer m+1's pre-activations
%   input              - target/expected output passed through to outputDelta
%   Vs                 - cell array of per-layer activations; Vs{m} feeds
%                        net{m}, Vs{layers+1} is the network output. The last
%                        row of Vs{layers+1} is dropped below, so it is
%                        presumably a bias unit -- TODO confirm with caller.
%   activationFn       - activation function handle (not used in this pass;
%                        kept so the caller-visible signature is unchanged)
%   activationDerivate - derivative of the activation, forwarded to the
%                        outputDelta / hiddenDelta helpers
%   BETA               - steepness parameter forwarded to the helpers
%
%   Reads globals LEARNING_RATE and MOMENTUM; reads and overwrites global
%   OLD_DELTA_W (previous per-weight updates, same cell layout as net).
global LEARNING_RATE;
global MOMENTUM;
global OLD_DELTA_W;

layers = size(net,2);

% ---- Output-layer delta --------------------------------------------------
s_lay_1 = size(Vs{layers+1},1)-1;        % output size excluding the last row
h = net{layers}*Vs{layers};              % pre-activation of the output layer
deltaOutput = outputDelta(h, Vs{layers+1}(1:s_lay_1,:), input, activationDerivate, BETA);

% ---- Hidden-layer deltas, propagated from the output backwards -----------
deltas{layers} = deltaOutput;
for k=(layers-1):-1:1
    h = net{k}*Vs{k};
    deltas{k} = hiddenDelta(h, net{k+1}, deltas{k+1}, activationDerivate, BETA);
end

% ---- Weight updates, from the output layer back to the input layer -------
for m=layers:-1:1
    % BUGFIX: this loop used to read "for i=i:size(net{m},1)", reusing the
    % stale index left over from the hidden-delta loop. With layers > 1 that
    % index happened to end at 1, masking the bug; with layers == 1 the
    % hidden loop never ran, the index was empty, and NO weights were updated.
    for i=1:size(net{m},1)
        for j=1:size(net{m},2)
            % Gradient-descent step for weight (i,j) of layer m.
            deltaW{m}(i,j) = LEARNING_RATE * deltas{m}(i) * Vs{m}(j);
            % NOTE(review): classical momentum ADDS the previous update
            % (+ MOMENTUM * old); this code subtracts it. Kept as-is to
            % preserve training behaviour -- confirm the intended sign.
            deltaW{m}(i,j) = deltaW{m}(i,j) - OLD_DELTA_W{m}(i,j) * MOMENTUM;
            net{m}(i,j) = net{m}(i,j) + deltaW{m}(i,j);
            OLD_DELTA_W{m}(i,j) = deltaW{m}(i,j);   % remember for next pass
        end
    end
end








