% Globals shared with the helper functions (SPerceptron, Learn, ...).
global LEARNING_RATE;
global LAYERS;
global BETA;
global SIZE;
global MOMENTUM_FACTOR;
global deltasBackup;
global checkDeltasBackup;
global activationFunc;

% Pattern-set definitions (sizes of the training pattern classes):
global one0Q;
global one1Q;
global one2Q;
global one3Q;
global one0QDiff;
global one1QDiff;
global one2QDiff;
global one3QDiff;

% Program state: presumably tells Learn whether the momentum delta
% backup has been initialized yet — confirm against Learn's source.
checkDeltasBackup = 0;

% Number of training patterns of each type (consecutive ones)
one0Q = 2000;
one1Q = 2000;
one2Q = 4500;
one3Q = 6500;

% Number of DIFFERENT patterns that will make up the sets
% above
one0QDiff = 2000;
one1QDiff = 155;%159
one2QDiff = 70;%72
one3QDiff = 190;%197

% activationFunc = 0 => tanh, activationFunc = 1 => exp
activationFunc = 0;

% Activation steepness (read by SPerceptron/Learn through the global)
BETA = 0.7;

% Uncomment to fix the random seed for reproducible runs
%SEED = 123912308;
%rand('state', SEED);

% Learning rate (eta)
LEARNING_RATE = 0.14;

% Adaptive ETA increment applied after consecutive improving epochs. 0=DISABLE
ADAPTATIVE_LEARNING_RATE_MAX = 0.01;

% Adaptive ETA decay fraction applied when the error worsens. 0=DISABLE
ADAPTATIVE_LEARNING_RATE_MIN = 0.017;

% How many consecutive improving epochs are required before growing ETA
IMPROVEMENT_FACTOR = 3;

% MOMENTUM FACTOR. 0=DISABLE
MOMENTUM_FACTOR = 0.9;

% Maximum accepted error for a pattern to count as correctly solved
DELTAERR = 0.01;

% [-RANDMULTIPLIER,RANDMULTIPLIER] range of the initial random weights
RANDMULTIPLIER = 0.9;

% Stop after this many epochs
EPOCHS_LIMIT = 500;

% Stop when the mean squared error (ECM) falls below this value
MAXECM = 0.001;

% Sliding-window length over the input signal
window = 3;
% Overlap between consecutive windows
overlapping = 2;
% Data input: load the raw signal and split it into windowed
% training/testing pattern sets.
dataset = importdata('../samples/samples2.txt');
[trainingPatterns testingPatterns] = parseSignal(dataset,window,overlapping);
EPOCH_SIZE = size(trainingPatterns,2) % no semicolon: deliberately echoes the epoch size

LAYERS = 4;
% Layer sizes (LAYERS+1 entries):
% the first is the number of input bits, the last the number of output bits.
% Architecture
SIZE = [window,3,10,5,1];

% Random initial weights, drawn uniformly from [-RANDMULTIPLIER, RANDMULTIPLIER].
% Layer k maps SIZE(k)+1 inputs (bias column included) onto SIZE(k+1) units.
weight = cell(1,LAYERS);
for layer = 1:LAYERS
    weight{layer} = RANDMULTIPLIER - 2*RANDMULTIPLIER * rand(SIZE(layer+1), SIZE(layer)+1);
end

% Best network seen so far, plus per-epoch statistics.
bestWeight = [];
bestSolvedInputs = 0;
solvedPerEpoch = [];
errorPerEpoch = [];

% Count of consecutive improving epochs (drives the adaptive learning rate).
consistenImprovement = 0;

%clc
fprintf('TRAINING:\n\n');
j = 1;
currentECM = 1;
% Train until the mean squared error drops below MAXECM
% or the epoch limit is reached.
while currentECM >= MAXECM && j <= EPOCHS_LIMIT
    % Training cycle: one epoch, visits every training pattern.
    solvedInputs = zeros(EPOCH_SIZE,1);
    errorPerEpoch(j) = 0;
    for aux=1:EPOCH_SIZE
        input = trainingPatterns{aux}(:,1);
        output = trainingPatterns{aux}(:,2);
        desiredOutput = solveInput(output);

        % Forward pass: evaluate the network on the input, keeping each
        % layer's intermediate output h in allOutputs for backpropagation.
        allOutputs = [];
        output = input;
        for i=1:LAYERS
            [ output h ] = SPerceptron(output, weight{i});
            allOutputs{i} = h;
        end
        
        % The pattern counts as solved when the summed signed error is small.
        % NOTE(review): positive and negative per-unit errors could cancel;
        % harmless with a single output unit (SIZE ends in 1) — confirm.
        if abs(sum(output-desiredOutput')) <= DELTAERR
            % Learned this pattern
            if solvedInputs(aux)==0
                solvedInputs(aux)=1;
            end
        end
        
        % Backpropagation is always applied, even on solved patterns.
        weight = Learn(weight, output, desiredOutput', input, allOutputs);
        % Accumulate this pattern's error.
        errorPerEpoch(j) = errorPerEpoch(j)+sum(output-desiredOutput')^2;
    end
    solvedPerEpoch(j) = sum(solvedInputs);
    
    % Mean squared error (ECM) for the epoch:
    errorPerEpoch(j) = (errorPerEpoch(j)/EPOCH_SIZE)*0.5;
    currentECM = errorPerEpoch(j);
    fprintf('Epoca: %d - error cuadratico medio: %f\n',j,errorPerEpoch(j));
    
    % If this epoch solved more inputs than the best so far, keep its weights.
    if solvedPerEpoch(j) > bestSolvedInputs
       bestSolvedInputs = solvedPerEpoch(j);
       bestWeight = weight;
       fprintf('BestSolvedInputs: %d\n',bestSolvedInputs);
       if bestSolvedInputs == EPOCH_SIZE
           fprintf('DONE: Solved all inputs\n');
           fprintf('Took %d epochs\n\n',j);
       end
    end
    
    
    % ADAPTIVE ETA: grow the learning rate after IMPROVEMENT_FACTOR
    % consecutive improving epochs; shrink it proportionally whenever
    % the error worsens. Equal errors leave everything unchanged.
    if j>1
        if errorPerEpoch(j)<errorPerEpoch(j-1)
           consistenImprovement = consistenImprovement + 1;
           if consistenImprovement >= IMPROVEMENT_FACTOR
               LEARNING_RATE = LEARNING_RATE + ADAPTATIVE_LEARNING_RATE_MAX;
           end
        elseif errorPerEpoch(j)>errorPerEpoch(j-1)
            consistenImprovement = 0;
            LEARNING_RATE = LEARNING_RATE - (LEARNING_RATE * ADAPTATIVE_LEARNING_RATE_MIN);
        end
    end
    j = j + 1;
end

% Undo the final increment so j holds the number of completed epochs.
j = j - 1;

%% PLOTS:
% Solved-inputs-per-epoch curve.
hold('off');
plot(1:j, solvedPerEpoch(1:j));
xlabel('Epocas');
ylabel('Cantidad inputs resueltas');
title('Cantidad de inputs resueltas por epoca');
grid('on');
print('-dpng', '../runs/solved.png');

% Mean-squared-error-per-epoch curve.
hold('off');
plot(1:j, errorPerEpoch(1:j), 'r');
xlabel('Epocas');
ylabel('ECM');
title('Error cuadratico medio por epoca');
grid('on');
print('-dpng', '../runs/ecm.png');

% TESTING phase: run every held-out pattern through the trained network
% and count the ones the network gets wrong.
% NOTE(review): evaluation uses the final 'weight', not 'bestWeight',
% even though the best network was saved during training — confirm intended.
fprintf('TESTING:\n');
failed = 0;
EPOCH_SIZE = size(testingPatterns,2);
for p = 1:EPOCH_SIZE
    pattern = testingPatterns{p};
    input = pattern(:,1);
    output = pattern(:,2);
    desiredOutput = solveInput(output);

    % Forward pass through all layers.
    output = input;
    for layer = 1:LAYERS
        output = SPerceptron(output, weight{layer});
    end

    % Report and count only the failures (written as a negated guard so
    % that a NaN output still counts as a failure, as in the original).
    if ~(abs(sum(output - desiredOutput')) <= DELTAERR)
        fprintf('%d BAD - %d cant unos\n', p, sum(pattern(:,2)));
        failed = failed + 1;
    end
end

if failed == 0
    fprintf('TESTING PASSED\n');
else
    fprintf('TESTING FAILED\n');
end

fprintf('%0.2f%% CORRECT\n', 100-(failed/EPOCH_SIZE*100));


% Dump the run configuration and results to ../runs/data.txt for later review.

fid = fopen('../runs/data.txt','w+');
% BUGFIX: the fopen result was never checked; fail loudly instead of
% silently writing to an invalid file id.
if fid == -1
    error('Could not open ../runs/data.txt for writing');
end
fprintf(fid,'========================================\n');
fprintf(fid,'activationFunc = %d\n',activationFunc);
% BUGFIX: BETA is fractional (0.7); %d rendered it in exponent notation.
fprintf(fid,'BETA = %f\n',BETA);
% BUGFIX: SEED only exists when the seeding lines at the top are
% uncommented; referencing it unconditionally crashed the script here.
if exist('SEED','var')
    fprintf(fid,'SEED = %f\n',SEED);
else
    fprintf(fid,'SEED = (not set)\n');
end
fprintf(fid,'LEARNING_RATE = %f\n',LEARNING_RATE);
fprintf(fid,'ADAPTATIVE_LEARNING_RATE_MAX = %f\n',ADAPTATIVE_LEARNING_RATE_MAX);
fprintf(fid,'ADAPTATIVE_LEARNING_RATE_MIN = %f\n',ADAPTATIVE_LEARNING_RATE_MIN);
fprintf(fid,'IMPROVEMENT_FACTOR = %f\n',IMPROVEMENT_FACTOR);
fprintf(fid,'MOMENTUM_FACTOR = %f\n',MOMENTUM_FACTOR);
fprintf(fid,'DELTAERR = %f\n',DELTAERR);
fprintf(fid,'RANDMULTIPLIER = %f\n',RANDMULTIPLIER);
fprintf(fid,'window = %f\n',window);
fprintf(fid,'overlapping = %f\n',overlapping);
% (removed a duplicated RANDMULTIPLIER line that was printed twice)
fprintf(fid,'trainingPatternsSize = %f\n',size(trainingPatterns,2));
fprintf(fid,'testingPatternsSize = %f\n',size(testingPatterns,2));

% Network architecture.
fprintf(fid,'SIZE = [ ');
for i=1:size(SIZE,2)
    fprintf(fid,'%d ',SIZE(i));
end
fprintf(fid,']\n');

fprintf(fid,'MAXECM = %f\n',MAXECM);
fprintf(fid,'LAST_EPOCH = %d\n',j);
fprintf(fid,'LAST_EPOCH_ECM = %f\n',currentECM);
fprintf(fid,'LAST_EPOCH_SOLVED = %d\n',solvedPerEpoch(j));

% Pattern-set sizes: total patterns vs. distinct patterns per class.
fprintf(fid,'one0 = %d - %d\n',one0Q,one0QDiff);
fprintf(fid,'one1 = %d - %d\n',one1Q,one1QDiff);
fprintf(fid,'one2 = %d - %d\n',one2Q,one2QDiff);
fprintf(fid,'one3 = %d - %d\n',one3Q,one3QDiff);
fprintf(fid,'%0.2f%% CORRECT\n', 100-(failed/EPOCH_SIZE*100));
fprintf(fid,'========================================\n');

fclose(fid);

% Persist the per-epoch statistics as ASCII files for external analysis.
save ../runs/solvedPerEpoch.txt solvedPerEpoch -ASCII
save ../runs/errorPerEpoch.txt errorPerEpoch -ASCII

