function [answer age errors WvecMin totalErrorMin]=learn(maxAges, Wvec, trainSet, expectedOuts, hiddenLayerNeuronsParam, etha2)
% LEARN  Train a multilayer perceptron with per-pattern (online) backpropagation.
%
% Inputs:
%   maxAges                 - epoch limit; the loop runs maxAges+1 epochs (while age <= maxAges)
%   Wvec                    - cell array of initial weight matrices, one per layer
%   trainSet                - training patterns, one per row (the -1 bias input is assumed
%                             to be already appended; see the commented-out horzcat below)
%   expectedOuts            - expected scalar output for each training pattern
%   hiddenLayerNeuronsParam - row vector with the neuron count of each hidden layer
%   etha2                   - learning rate used for the whole run
%
% Outputs:
%   answer        - weight matrices after the last epoch
%   age           - number of epochs executed
%   errors        - per-epoch training error (half sum of squared errors)
%   WvecMin       - weights that achieved the lowest training error seen
%   totalErrorMin - that lowest training error
%
% NOTE(review): state is shared with the helper functions (g, gDer, updateW,
% adaptativeEtha) through globals, so concurrent/nested calls are not safe.

    global etha;
    global epsilon;
    global beta;
    global hiddenLayerNeurons;
    global AdaptativeEthaParameterA;
    global AdaptativeEthaParameterB;
    global alpha;
    
    %used to be 0.1
    AdaptativeEthaParameterA = 0.2;
    AdaptativeEthaParameterB = 0.1;
    
    hiddenLayerNeurons = hiddenLayerNeuronsParam;
    %Hidden layers + output layer
    layersNumber = cols(hiddenLayerNeurons)+1;
    % epsilon is set for the helpers via the global above; it appears unused in
    % this file — presumably consumed elsewhere, TODO confirm.
    epsilon= 0.01;
    etha= etha2;
    alpha=0.5;
    beta=0.25;
    age = 0;

    % 100 acts as "infinity" for the best-error tracker
    totalErrorMin = 100;
    totalError = 1;
    totalErrorAnt = 1;
%    totalTestErrorAnt = 1;
%    minTotalTestError = 1;
    decrease = 0;
    firstTime=1;

    [rowsT colsT] = size(trainSet);
    %[rowsO colsO] = size(expectedOuts);
    %append the node with an edge of weight -1 (bias) — kept disabled, the
    %caller is expected to provide it already
%    trainSet = horzcat(trainSet, ones(rowsT, 1).*(-1));
    %colsT=colsT+1;
    %weight matrix
%    momentum = generateDeltasWeightVector(trainSet, expectedOuts);

    %Preallocations to speed up the algorithm
    outputs = cell(1,rowsT);
    h = cell(1,layersNumber);
    deltas = cell(1,layersNumber);
    momentum = cell(1,layersNumber);
    expectedOutputs = zeros(1,rowsT);
    obtainedOutputs = zeros(1,rowsT);
    
    while age <= maxAges
        % Visit the training patterns in a fresh random order each epoch
        randIndex = randperm(rowsT);
        for k=1:rowsT
            %Initialize the output chain with the selected input pattern
            outputs{1} = trainSet(randIndex(k), :);
            %forward pass through every layer
            for i=1:layersNumber
                %membrane potential (pre-activation)
                h{i} = Wvec{i} * outputs{i}';
                h{i} = h{i}';
                outputs{i+1} = g(h{i});
                %CAREFUL! The -1 bias is appended to every layer's output,
                %including the final (network output) layer — keep that in mind
                %when reading outputs{layersNumber+1}.
                outputs{i+1} = horzcat(outputs{i+1}, -1);
            end
            %-------OK!!
            
            %The algorithm treats the input as a layer of its own.
            %Delta of the output layer; take component 1 to skip the appended
            %bias value.
            auxOutputs = outputs{layersNumber+1}(1);   
                
            deltas{layersNumber} = firstDelta(h{layersNumber}, auxOutputs, expectedOuts(randIndex(k)));
            i=layersNumber-1;
            %Deltas of the remaining layers, walking backwards; the bias column
            %of the next layer's weights is stripped before backpropagating.
            while i>=1
                Waux = Wvec{i+1}(:,1:end-1);
                deltas{i} = otherDelta(h{i}, Waux, deltas{i+1});
                i=i-1;
            end
            %Weight update (with momentum after the first pattern)
            for i=1:layersNumber
               [Wvec{i} momentum{i}] = updateW(Wvec{i}, deltas{i}, outputs{i}, momentum{i}, firstTime); 
            end
            firstTime=0;    
            %Store the expected output to compute the quadratic error later
            expectedOutputs(k) = expectedOuts(randIndex(k));
            %Store the value actually produced at the output
            obtainedOutputs(k) = outputs{layersNumber+1}(1,1);
            
        end 
            
 %       size(obtainedOutputs)
 %       size(expectedOutputs)
        %training-set error for this epoch (see local function `error`, which
        %shadows the builtin of the same name inside this file)
        totalError = error(obtainedOutputs, expectedOutputs);
            
        %track the best weights seen across all epochs
        if(totalError < totalErrorMin)
            totalErrorMin = totalError;
            WvecMin = Wvec;
        end
        
        %adaptive eta (disabled)
%        if (totalError - totalErrorAnt < 0)
%            decrease = decrease + 1;
%            if (mod(decrease,4) == 0)
%                etha = adaptativeEtha('increaseEtha')
%            end
%        elseif(totalError - totalErrorAnt > 0)
%            decrease = 0;
%            etha = adaptativeEtha('decreaseEtha')
%        else
%            etha = adaptativeEtha('doNothing')
%        end         
%        totalErrorAnt = totalError;
            
        age = age + 1;
        % errors grows by one element per epoch (final length = age)
        errors(age)=totalError;
        %obtainedOutputs;
        %expectedOutputs;
    end
    answer=Wvec;
    % no-op: cputime's value is discarded (likely leftover timing code)
    cputime;
end

%momentum improvement: returns the updated weights plus the step taken, which
%the caller feeds back in as `momentum` on the next pattern.
function [answer momentumResp]=updateW(W, delta, output, momentum, firstTime)
    global etha;
    global alpha;
    % Gradient-descent step: eta * delta' * output
    gradStep = (etha.*delta')*output;
    momentumResp = gradStep;
    % Plain step first; add the momentum term only once a previous step exists.
    answer = W + gradStep;
    if firstTime~=1
        answer = answer + alpha*momentum;
    end
end

function answer = calculateTestOutput(Wvec, testSet)
% CALCULATETESTOUTPUT  Forward-propagate every row of testSet through the
% network given by the weight cell array Wvec and return the first component
% of the final layer for each pattern, as a row vector.
    % Append the fixed -1 bias input as an extra column
    testSet = horzcat(testSet, ones(rows(testSet), 1)*-1);
    % Preallocate instead of growing inside the loop; this also makes the
    % function return an empty row vector (instead of erroring on an
    % undefined variable) when testSet has no rows.
    testOutputs = zeros(1, rows(testSet));
    for i=1:rows(testSet)
        currentTest = testSet(i,:)';
        for j=1:cols(Wvec)
            h = Wvec{j}*currentTest;
            currentTest = g(h);
            % bias unit feeding the next layer
            currentTest = vertcat(currentTest, -1);
        end
        % First component of the final layer is the network output
        testOutputs(i) = currentTest(1);
    end
    answer = testOutputs;
end

function answer = adaptativeEtha(func)
% ADAPTATIVEETHA  Return a new learning rate according to the requested
% action: 'increaseEtha' (additive bump), 'decreaseEtha' (multiplicative
% shrink) or 'doNothing'. Any other string leaves `answer` unassigned, which
% errors at return time — same as the original switch.
    global etha;
    global AdaptativeEthaParameterA;
    global AdaptativeEthaParameterB;
    if strcmp(func, 'increaseEtha')
        answer = etha + AdaptativeEthaParameterA;
    elseif strcmp(func, 'decreaseEtha')
        answer = etha - AdaptativeEthaParameterB*etha;
    elseif strcmp(func, 'doNothing')
        answer = etha;
    end
end

function answer = firstDelta(h, output, expectedOutput)
% Output-layer delta for backpropagation: g'(h) * (target - actual).
    outputError = expectedOutput - output;
    answer = gDer(h) * outputError;
end

function answer = otherDelta(h, W, delta)
% Hidden-layer delta: the next layer's delta propagated back through its
% weights (bias column already stripped by the caller), gated element-wise
% by the activation derivative at this layer's pre-activations.
    gPrime = gDer(h);
    propagated = (W' * delta')';
    answer = gPrime .* propagated;
end

function totalError = error(outputs, expectedOutputs)
% Half the sum of squared differences between obtained and expected outputs.
% NOTE(review): this local function shadows MATLAB/Octave's builtin error()
% everywhere inside this file; renaming it (and its callers) would be safer.
    totalError = 0.5 * sum((outputs - expectedOutputs).^2);
end

function answer=generateWeightVector(trainSet, expectedOuts)
% Build a cell array of randomly initialized weight matrices, one per layer.
% Layer sizes are: input width minus the bias column, then each hidden layer
% size from the global, then the output size.
    global hiddenLayerNeurons;
    % Neuron count per layer, assembled in one shot (hiddenLayerNeurons is a
    % row vector, so concatenation matches the original element-wise copy).
    layerNeurons = [cols(trainSet(1,:))-1, hiddenLayerNeurons, cols(expectedOuts(1))];
    for i=1:cols(layerNeurons)-1
        % +1 column holds the bias weight of each neuron
        answer{i} = setRandomWeights(zeros(layerNeurons(i+1), layerNeurons(i)+1));
    end
end

%function answer=generateDeltasWeightVector(trainSet, expectedOuts)
%    global hiddenLayerNeurons;
%    layerNeurons = zeros(hiddenLayerNeurons + 2);
%    layerNeurons(1) = cols(trainSet(1,:))-1;
%    for i=1:cols(hiddenLayerNeurons)
%        layerNeurons(i+1) = hiddenLayerNeurons(i);
%    end
%    layerNeurons(cols(hiddenLayerNeurons) + 2) = cols(expectedOuts(1));
%    for i=1:cols(layerNeurons)-1
%        answer{i} = zeros(layerNeurons(i+1), layerNeurons(i)+1);
%    end
%end

function answer=setRandomWeights(W)
% Return a matrix the size of W filled with uniform random weights in
% [-0.5, 0.5]. (The bound variables were previously named `ceil`/`floor`,
% shadowing the MATLAB/Octave builtins of the same name — renamed here.)
    upperBound = 0.5;
    lowerBound = -0.5;
    answer = rand(size(W))*(upperBound - lowerBound) + lowerBound;
end

function answer=g(x)
% Activation function: hyperbolic tangent with gain `beta` (set by learn).
    global beta;
    answer = tanh(beta .* x);
end

function answer=gDer(x)
% Derivative of the activation g: beta * (1 - g(x)^2), element-wise.
    global beta;
    activation = g(x);
    answer = beta .* (1 - activation.^2);
end