function [answer age errors]=learn(activationFunc, trainSet, expectedOuts, hiddenLayerNeuronsParam ,etha2)
    % Train a multilayer perceptron with incremental (online) backpropagation.
    %
    % Inputs:
    %   activationFunc          - activation name: 'step', 'linear' or 'tanh' (see g)
    %   trainSet                - one training pattern per row; a bias column is appended here
    %   expectedOuts            - expected output per pattern (indexed linearly below,
    %                             so one scalar output per pattern is assumed)
    %   hiddenLayerNeuronsParam - row vector with the neuron count of each hidden layer
    %   etha2                   - learning rate (stored in the global etha)
    % Outputs:
    %   answer - cell array of trained weight matrices, one per layer
    %   age    - number of epochs run until the error drops to epsilon or below
    %   errors - per-epoch squared-error history, errors(age)
    global etha;
    global epsilon;
    global beta;
    global hiddenLayerNeurons;
    hiddenLayerNeurons = hiddenLayerNeuronsParam;
    % Hidden layers + output layer
    layersNumber = cols(hiddenLayerNeurons)+1;
    epsilon= 0.001;
    etha= etha2;
    beta=1;
    age = 0;
    [rowsO colsO] = size(expectedOuts);
    [rowsT colsT] = size(trainSet);
    % Append a constant -1 column so the bias weight is learned like any other weight
    trainSet = horzcat(trainSet, ones(rowsT, 1).*(-1));
    colsT=colsT+1;
    
    Wvec = generateWeightVector(trainSet, expectedOuts);

    totalError = 1;
    while totalError>epsilon 
        % Present the patterns in a fresh random order on every epoch
        randIndex = randperm(rowsT);
        % For every pattern
        for k=1:rowsT
            outputs{1} = trainSet(randIndex(k), :);
            % Forward pass through every layer
            for i=1:layersNumber
                % Membrane potential (weighted input) of layer i
                h{i} = Wvec{i} * outputs{i}';
                h{i} = h{i}';
                outputs{i+1} = g(activationFunc, h{i});
                % Append -1 so the next layer sees the bias input too
                outputs{i+1} = horzcat(outputs{i+1}, -1);
            end
            % Delta of the output layer. The algorithm treats the input as a
            % layer, so outputs{layersNumber+1} holds the network output.
            deltas{layersNumber} = firstDelta(activationFunc, h{layersNumber}, outputs{layersNumber+1}(1), expectedOuts(randIndex(k)));
            % Backward pass: propagate the deltas toward the input layer
            i=layersNumber-1;
            while i>=1
                % Drop the bias column: the bias input has no delta of its own
                Waux = Wvec{i+1}(:,1:end-1);
                deltas{i} = otherDelta(activationFunc, h{i}, Waux, deltas{i+1});
                i=i-1;
            end
            for i=1:layersNumber
               Wvec{i} = updateW(Wvec{i}, deltas{i}, outputs{i}); 
            end
            % Save the expected output to compute the mean squared error later
            expectedOutputs(k) = expectedOuts(randIndex(k));
            % The last entry of outputs is the network output
            obtainedOutputs(k) = outputs{layersNumber+1}(1);
            
        end
        % NOTE(review): error() here is the local function below, which
        % shadows the MATLAB/Octave builtin error(). Missing semicolons on
        % the next two lines echo progress to the console each epoch.
        totalError = error(obtainedOutputs, expectedOutputs)
        age = age + 1
        errors(age)=totalError;
        obtainedOutputs;
        expectedOutputs;
    end
    answer=Wvec;
end

function answer = firstDelta(activationFunc, h, output, expectedOutput)
    % Delta of the output layer: g'(h) scaled by the output error.
    outputError = expectedOutput - output;
    answer = gDer(activationFunc, h) * outputError;
end

function answer = otherDelta(activationFunc, h, W, delta)
    % Delta of a hidden layer per standard backpropagation:
    %   delta_i = g'(h_i) .* (W' * delta_{i+1})
    % W must already have its bias column stripped by the caller.
    % (Removed leftover debug statements: bare 'otherDelta'; W; delta; h;
    % expressions that computed nothing.)
    aux1 = gDer(activationFunc, h);
    aux2 = (W' * delta')';
    answer = aux1 .* aux2;
end

function answer=updateW(W, delta, output)
    % Gradient-descent weight update: W <- W + etha * (delta' * output).
    % delta is a row vector of layer deltas, output the (row) input of the
    % layer including its bias entry, so delta'*output is the outer product
    % matching the shape of W.
    % (Removed leftover debug statements: bare W; delta; output; expressions.)
    global etha;
    answer = W + etha .* (delta' * output);
end

function totalError = error(outputs, expectedOutputs)
    % Half the sum of squared differences between obtained and expected outputs.
    % NOTE(review): this local function shadows the MATLAB/Octave builtin
    % error(); the name is kept because learn() calls it by this name.
    totalError = 0.5 * sum((outputs - expectedOutputs).^2);
end

function answer=generateWeightVector(trainSet, expectedOuts)
    % Build one randomly-initialised weight matrix per layer.
    % Layer sizes are [inputs, hidden..., outputs]; every matrix gets one
    % extra column for the bias weight of the previous layer.
    global hiddenLayerNeurons;
    % trainSet already carries the appended bias column, hence the -1
    layerNeurons(1) = cols(trainSet(1,:))-1;
    for i=1:cols(hiddenLayerNeurons)
        layerNeurons(i+1) = hiddenLayerNeurons(i);
    end
    % FIX: index the whole first row. cols(expectedOuts(1)) selected a single
    % scalar, so the output layer was always 1 neuron regardless of how many
    % output columns expectedOuts has. Identical for the scalar-output case.
    layerNeurons(cols(hiddenLayerNeurons) + 2) = cols(expectedOuts(1,:));
    for i=1:cols(layerNeurons)-1
        % +1 column for the bias input of the previous layer
        W = zeros(layerNeurons(i+1), layerNeurons(i)+1);
        answer{i} = setRandomWeights(W);
    end
end

function answer=setRandomWeights(W)
    % Fill a matrix the size of W with uniform random weights in [lo, hi).
    % FIX: the local bounds were named ceil/floor, shadowing the builtins
    % of the same name inside this function; renamed to hi/lo.
    hi = 0.5;
    lo = -0.5;
    answer = rand(size(W))*(hi - lo) + lo;
end

function answer=g(activationFunc, x)
    % Activation function applied element-wise to x.
    % Supported names: 'step' (sign), 'linear' (identity), 'tanh' (tanh(beta*x)
    % with the global gain beta).
    % FIX: the output variable was named ans, which shadows the special
    % automatic variable of MATLAB/Octave; renamed to answer.
    % NOTE(review): any other name leaves the output unassigned and raises
    % at return, matching the original behaviour.
    global beta;
    switch(activationFunc)
        case 'step'
            answer = sign(x);
        case 'linear'
            answer = x;
        case 'tanh'
            answer = tanh(beta*x);
    end        
end

function answer=gDer(activationFunc, x)
    % Derivative of the activation g evaluated at x.
    % 'tanh' -> beta*(1 - tanh(beta*x).^2); any other name -> a row of ones
    % the width of x (the constant "derivative" this code uses for step/linear).
    % FIX: the output variable was named ans, which shadows the special
    % automatic variable of MATLAB/Octave; renamed to answer.
    global beta;
    switch(activationFunc)
        case 'tanh'
            answer = beta*(1-g(activationFunc, x).^2);
        otherwise
            answer = ones(1,cols(x));
    end    
end