% Generate the training set and its targets.
% Inputs are uniform random points (x, y) kept inside the domain
% 0 <= x + 0.5*y <= 2*pi, so the target sin(x + 0.5*y) spans one full period.
N = 500;
P = zeros(2, N);   % training inputs, one column per sample
T = zeros(1, N);   % training targets
for i = 1:N
    % Rejection sampling: redraw until the point lies inside the domain.
    % a + (b-a)*rand is the same uniform draw as random('unif', a, b)
    % but does not require the Statistics Toolbox.
    x = -5 + 10*rand;
    y = -10 + 20*rand;
    while x + 0.5*y > 2*pi || x + 0.5*y < 0
        x = -5 + 10*rand;
        y = -10 + 20*rand;
    end
    P(1,i) = x;
    P(2,i) = y;
    T(1,i) = sin(x + 0.5*y);
end
% Scatter plot of the generated inputs
figure('Name', 'Entradas');
plot(P(1,:), P(2,:), '.');

% Build a feed-forward network: three tansig hidden layers (15-40-25) and a
% linear output unit, trained with plain gradient descent, then train on (P, T).
layer_sizes   = [15 40 25 1];
transfer_fcns = {'tansig' 'tansig' 'tansig' 'purelin'};
net = newff(minmax(P), layer_sizes, transfer_fcns, 'traingd', 'learngd');
% Train the network and show the training diagnostics.
[trained_net, tr] = train(net, P, T);
plotperform(tr)
plottrainstate(tr)

% Analyze how the trained net behaves when interpolating and extrapolating.
% Interpolation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Test grid inside the training domain.
[X, Y] = meshgrid(0:0.1:pi, 0:0.1:pi);
tam = size(X);
% Flatten the grid into one 2-row sample matrix (one column per grid point);
% this replaces the original element-by-element copy loops. The traversal
% order does not matter because RES(i,j) only depends on (X(i,j), Y(i,j)).
INPUT = [X(:)'; Y(:)'];
% Simulate with the trained network and fold the outputs back onto the grid.
OUTPUT = sim(trained_net, INPUT);
RES = reshape(OUTPUT, tam);
% Expected output of the target function on the same grid.
SENO = sin(X + 0.5*Y);
% Mean squared error. NOTE: the original used sqrt(SENO*SENO - RES*RES),
% which performs matrix products (not element-wise squares) and is not a
% squared-error measure at all (it can even produce complex values).
% The MSE is the mean of the element-wise squared differences. The variable
% is no longer named "error", which shadows MATLAB's built-in error().
mse_interp = sum(sum((SENO - RES).^2))/(tam(1)*tam(2));
fprintf('el error por interpolar es: %g\n', mse_interp);

% Extrapolation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Test grid entirely outside the training domain (x + 0.5*y > 2*pi).
[X, Y] = meshgrid(2*pi:0.1:3*pi, 2*pi:0.1:3*pi);
tam = size(X);
% Flatten the grid into one 2-row sample matrix (one column per grid point);
% replaces the original element-by-element copy loops.
INPUT = [X(:)'; Y(:)'];
% Simulate with the trained network and fold the outputs back onto the grid.
OUTPUT = sim(trained_net, INPUT);
RES = reshape(OUTPUT, tam);
% Expected output of the target function on the same grid.
SENO = sin(X + 0.5*Y);
% Mean squared error. NOTE: the original used sqrt(SENO*SENO - RES*RES),
% which performs matrix products (not element-wise squares) and is not a
% squared-error measure at all (it can even produce complex values).
% The MSE is the mean of the element-wise squared differences. The variable
% is no longer named "error", which shadows MATLAB's built-in error().
mse_extrap = sum(sum((SENO - RES).^2))/(tam(1)*tam(2));
fprintf('el error por extrapolar es: %g\n', mse_extrap);



% Compare the network's response surface against the original function.
% Generate the same inputs for both.
[X, Y] = meshgrid(-2*pi:0.2:2*pi, 0:0.2:4*pi);
tam = size(X);
% Flatten the grid into one 2-row sample matrix (one column per grid point);
% replaces the original element-by-element copy loops. The traversal order
% does not matter because RES(i,j) only depends on (X(i,j), Y(i,j)).
INPUT = [X(:)'; Y(:)'];
% Simulate with the trained network and fold the outputs back onto the grid.
OUTPUT = sim(trained_net, INPUT);
RES = reshape(OUTPUT, tam);

% Plot both surfaces side by side for visual comparison.
figure('Name', 'Función Seno');
mesh(X, Y, sin(X + 0.5*Y));
figure('Name', 'Red evaluada');
mesh(X, Y, RES);

% Compare different optimisation techniques on the same architecture:
% momentum (traingdm), adaptive learning rate (traingda), and both (traingdx).
% All networks are created first, then trained, matching the original order.
candidate_nets = {
    newff(minmax(P), [15 40 25 1], {'tansig' 'tansig' 'tansig' 'purelin'}, 'traingdm', 'learngdm')
    newff(minmax(P), [15 40 25 1], {'tansig' 'tansig' 'tansig' 'purelin'}, 'traingda', 'learngd')
    newff(minmax(P), [15 40 25 1], {'tansig' 'tansig' 'tansig' 'purelin'}, 'traingdx', 'learngdm')
};
for k = 1:numel(candidate_nets)
    [trained_net, tr] = train(candidate_nets{k}, P, T);
    plotperform(tr)
    plottrainstate(tr)
end


% Compare plain gradient descent under several learning-rate values.

lrs = [0.5 0.1 0.05 0.001];

% Create one network per learning rate first, then train them all,
% matching the original create-then-train order.
lr_nets = cell(1, numel(lrs));
for k = 1:numel(lrs)
    lr_nets{k} = newff(minmax(P), [15 40 25 1], ...
        {'tansig' 'tansig' 'tansig' 'purelin'}, 'traingd', 'learngd');
    lr_nets{k}.trainParam.lr = lrs(k);
end
for k = 1:numel(lrs)
    [trained_net, tr] = train(lr_nets{k}, P, T);
    plotperform(tr)
    plottrainstate(tr)
end
