clear all
format long
% Globals shared with the training helper functions (trainNet,
% calcLearningRate, ...) via MATLAB's global mechanism.
global improvementType;
global IMPROVEMENT_LEARNING_RATE;
global IMPROVEMENT_MOMENTUM;
%number of times the total error decreased/increased from one iteration to
%the next (updated by the training helpers)
global totalErrorDecrease;
global totalErrorIncrease;
global LEARNING_RATE;
global MOMENTUM;
global OLD_DELTA_W;
global TOLERANCE;
global HIDDEN_NEURONS;
global K_MOMEMNTUM; % NOTE(review): "MOMEMNTUM" is a typo, kept because other code references this exact name

%==========================================================================
%constants
%====DONT TOUCH THIS===================
IMPROVEMENT_LEARNING_RATE = 1; % index into improvementType: adaptive learning rate flag
IMPROVEMENT_MOMENTUM = 2;      % index into improvementType: momentum flag
FALSE = 0;
TRUE = 1;
EXP = 1;  % selects the exponential (logistic) activation
TANH = 2; % selects the hyperbolic tangent activation
SAMPLE_SIZE = 30000;
WINDOW = 3; %the width of the sliding window
P = SAMPLE_SIZE - (WINDOW -1); % total number of window patterns in the sample
INPUT_LENGHT = WINDOW; %matches the window width (NOTE(review): "LENGHT" typo kept; referenced elsewhere)
OUTPUT_LENGHT = 1;

%=======================================

%configuration Constants================
SEED = 41149;    
improvementType = [1 1];
%improvementType = [1 x]; means adaptative ETA is active
%improvementType = [0 x]; means adaptative ETA is inactive
%improvementType = [x 1]; means momentum is active
%improvementType = [x 0]; means momentum is inactive
LEARNING_RATE = 0.5;
K_MOMEMNTUM = 0.9; %represents ALFA (it doesn't matter if momentum is not active)
HIDDEN_NEURONS = 5;
LAYERS = 2;
ACTIVATION_TYPE = TANH; % EXP % TANH
BETA = 0.5;
MAX_EPOCHS_FLAT = 3; %epochs without progress before noise is injected
%=========================================
%==========================================================================
% The momentum coefficient is only applied when its improvement flag is
% set; otherwise it collapses to zero so the momentum term has no effect.
if improvementType(IMPROVEMENT_MOMENTUM) ~= 0
    MOMENTUM = K_MOMEMNTUM;
else
    MOMENTUM = 0.0;
end
% Counters for consecutive total-error decreases/increases across epochs.
totalErrorDecrease = 0;
totalErrorIncrease = 0;
%==========================================================================
% Seed the legacy RNG so training runs are reproducible.
rand('seed', SEED);

% Select dataset, activation-function names and tolerance according to the
% configured activation type.  The tolerances differ presumably because the
% two activations have different output ranges (the report at the end of
% the script treats the negative class as 0 for EXP and -1 for TANH).
if(ACTIVATION_TYPE == EXP ) 
    load inputMatrixExp.mat
    load outputMatrixExp.mat
    inputMatrix = inputMatrixExp;
    outputMatrix = outputMatrixExp;
    activationFn = 'expActivation';
    activationDerivate = 'expActDerivate';
    TOLERANCE = 0.1;
else
    load inputMatrixTanh.mat
    load outputMatrixTanh.mat
    inputMatrix = inputMatrixTanh;
    outputMatrix = outputMatrixTanh;
    activationFn = 'tanhActivation';
    activationDerivate = 'tanhActDerivate';
    TOLERANCE = 0.2;
end


% Target mean squared error: 10% of the tolerance's squared-error bound.
ECM_MAX = (TOLERANCE*TOLERANCE/2) *0.1;
%==========================================================================
%initialization
% Previous weight deltas start at zero; presumably consumed by trainNet's
% momentum update through the OLD_DELTA_W global — confirm in trainNet.
OLD_DELTA_W = generateNullNet(INPUT_LENGHT, OUTPUT_LENGHT, LAYERS);

%get the first TRAINING_P training patterns from the total patterns
[trainingPatterns trainingResults] = selectTrainingPatterns(inputMatrix, outputMatrix);
TRAINING_P = size(trainingPatterns,1); % one pattern per row
stop = FALSE; % loop-control flag for the training loop below
net = constructNet(INPUT_LENGHT, OUTPUT_LENGHT, LAYERS, ACTIVATION_TYPE);

%==========================================================================
%train the neuronal net
%
% Trains until the mean squared error drops below ECM_MAX or every training
% pattern is memorized.  Each epoch:
%   1. shuffle the training patterns,
%   2. run one training pass (trainNet),
%   3. measure performance (hasLearned) and adapt the learning rate,
%   4. if adaptive ETA is on and total error worsened, roll the net back,
%   5. inject noise after MAX_EPOCHS_FLAT epochs without progress.
epoch = 1;
passed = 0;
% Sentinel "previous" errors, large enough that the first epoch is never
% rejected by the rollback check below.
lastTotalError = 999999999999999999999999999.9;
lastECMError = 9999999999999999999999999999.9;
lastPassed = 0;
flat_epochs = 0; % consecutive epochs with an unchanged memorized-pattern count
while((stop == FALSE))
    fprintf(1,'\nepoca: %g\n', epoch);
    %present the patterns in a different order each time
    [trainingPatterns trainingResults] = shufflePatterns(TRAINING_P, trainingPatterns, trainingResults);
    backupNet = net; % kept so a bad epoch can be undone below
    net = trainNet(net, trainingPatterns, trainingResults, activationFn, activationDerivate, BETA);
    [passed ECM totalError ] = hasLearned(net, trainingPatterns, trainingResults, activationFn, BETA);
    LEARNING_RATE = calcLearningRate(ECM, lastECMError, passed, TRAINING_P);
    if(improvementType(IMPROVEMENT_LEARNING_RATE)==1)
        % Adaptive ETA: if the total error got worse, discard this epoch and
        % restore the previous net and metrics.  The epoch-1 indexing is safe
        % on epoch 1 because the sentinel lastTotalError cannot be exceeded.
        if(totalError > lastTotalError)
            ECM = emc_error(epoch-1);
            passed = learning_plot(epoch-1);
            net = backupNet;
        end
    end

    % Track epochs without progress in the memorized-pattern count.
    if(passed == lastPassed)
        flat_epochs = flat_epochs +1;
    else
        flat_epochs = 0;
    end
    lastPassed = passed;
    if( flat_epochs >= MAX_EPOCHS_FLAT)
        % Perturb the weights to try to escape the plateau.
        net = generateNoisyNet(net, INPUT_LENGHT, OUTPUT_LENGHT, LAYERS);
        fprintf(1, 'generacion de ruido\n');
    end

    lastTotalError = totalError;
    lastECMError = ECM;
    fprintf(1,'cantidad de patrones memorizados: %g\n', passed);
    fprintf(1,'error cuadratico medio: %g\n', ECM);

    % Stop when the MSE target is reached or all patterns are memorized.
    if (ECM < ECM_MAX || passed == size(trainingPatterns,1))
       stop = TRUE; % was the literal 1; use the declared constant for consistency
    end
    % Per-epoch history (grown dynamically; consider preallocating if the
    % number of epochs becomes large).
    learning_plot(epoch) = passed;
    emc_error(epoch) = ECM;
    total_error(epoch) = totalError;
    learning_rate(epoch) = LEARNING_RATE;
    epoch = epoch + 1;
    % Refresh the progress plots every epoch.  The original guard
    % mod(epoch,1)==0 was always true, so it has been removed.
    subplot(3,1,1); plot(emc_error);
    subplot(3,1,2); plot(learning_plot);
    subplot(3,1,3); plot(learning_rate);
    drawnow;

end

fprintf(1,'\nFIN DEL ENTRENAMIENTO\nResultados:\n');

% Off-by-one fix: the training loop increments epoch once past the last
% completed epoch, so the completed-epoch count is epoch-1.
fprintf(1, 'epocas: %g\n', epoch-1);
fprintf(1, 'cantidad de patrones memorizados: %g\n', passed);
fprintf(1, 'error cuadratico medio: %g\n', ECM);

%==========================================================================
%testing all the patterns
% Evaluate generalization over the FULL pattern set (training + unseen).
[FINALpassed FINALECM FINALtotalError NO_PASO_1 NO_PASO_0] =  generalize(net, inputMatrix, outputMatrix, activationFn, BETA );
percentPassed = (100.0 * FINALpassed)/P;

fprintf(1,'\nCapacidad de generalizacion:\n');
fprintf(1,'cantidad de patrones aprendidos: %g\t (%g %%)\n', FINALpassed, percentPassed);
fprintf(1,'error cuadratico medio sobre todos los patrones: %g\n', FINALECM);
fprintf(1,'cantidad de patrones con  1 no aprendidos: %g\n', NO_PASO_1);
% The negative-class label shown depends on the activation: 0 for the
% logistic (EXP) net, -1 for the tanh net.
if(ACTIVATION_TYPE == EXP ) 
    fprintf(1,'cantidad de patrones con  0 no aprendidos: %g\n', NO_PASO_0);
else
    fprintf(1,'cantidad de patrones con -1 no aprendidos: %g\n', NO_PASO_0);
end