%% Neural Net Project 1
% Ashkan Akbariyeh
%% Program 1
% Part I: linear equation solution
% Least-squares fit of a single linear neuron t ~ w*x (N = M = 1):
% accumulate the correlations over the training set, then solve w = c/r.
clear;clc;
disp('Part I: linear equation solution');
data=[% p x1 t1 %
1 2.1 1.2;
2 -1 -2.1;
3 4.3 8.4;
4 3.5 7.2;
5 -3.7 -7;
6 8.2 16.1;
7 -5.4 -11.1 ];
Nv=size(data,1); % number of training patterns (rows); size(...,1) is safe even if the matrix ever has more columns than rows, unlike length()
N=1; % number of inputs xn
M=1; % number of outputs tn
c=0;r=0;Et=0;
for p=1:Nv
xp=data(p,1+1);          % input x of pattern p (column 1 is the pattern index)
tp=data(p,1+N+1:1+N+M);  % target t of pattern p
c=c+xp*tp;   % cross correlation  E[x*t]
r=r+xp*xp;   % auto correlation   E[x^2]
Et=Et+tp*tp; % target energy      E[t^2]
end
c=c/Nv; r=r/Nv; Et=Et/Nv; % convert sums to averages
w=c/r; % weight minimizing E(w) = Et - 2*w*c + w^2*r
disp('   r         c          w=c/r');
disp([r,c,w]);
disp('=============================');
%%
% Part II: steepest descent with a fixed learning factor for N = M = 1.
% Uses the correlations c, r, Et computed in Part I; each iteration moves
% w a fixed fraction B2 down the error gradient.
disp('Part II: steepest decent solution for N=M=1');
w=0.0;      % initial weight guess
B2=0.002;   % fixed learning factor
Nit=35;     % number of iterations
disp('it   E           w           g');

for iter=1:Nit
    grad=-2*(c-w*r);          % dE/dw of E(w) = Et - 2*w*c + w^2*r
    E=Et-2*w*c+w*w*r;         % mean squared error at the current w
    fprintf('%d    %f    %f    %f\n',iter,E,w,grad); % report pre-update state
    w=w-B2*grad;              % descend along the negative gradient
end
disp('=============================');
%%
% Part III
% Steepest descent with the optimal learning factor for the 1-D quadratic
% error E(w) = Et - 2*w*c + w^2*r. The optimal step lands on the minimum
% in a single move, so only a few iterations are shown.
disp('steepest decent with optimal learning factor');
w=0.0;  % initial weight guess
Nit=3;  % number of iterations
disp('it   E           w           g');

for iter=1:Nit
    grad=-2*(c-w*r); % dE/dw
    % Optimal learning factor along -grad; guard the degenerate case where
    % the gradient (or r) vanishes and no step should be taken.
    if (r*grad==0)
        B2=0;
    else
        B2=-(c-w*r)/(grad*r); % reduces to 1/(2*r) whenever grad ~= 0
    end
    E=Et-2*w*c+w*w*r; % mean squared error at the current w
    fprintf('%d    %f    %f    %f\n',iter,E,w,grad); % report pre-update state
    w=w-B2*grad;
end
disp('=============================');

%% Program 2
% Part I: linear equation solution
% Least-squares fit for N = 2 inputs, M = 1 output: accumulate the
% cross-correlation vector c, auto-correlation matrix r, and target energy
% Et, then solve the normal equations r*w = c.
clear;
disp('Solution for w1 and w2 for N=2 and M=1');
data=[% p x1 x2 t1 %
1 2.1 -1.2 1.1;
2 -1 0.95 0.93;
3 4.3 -2.4 2.2;
4 3.5 8.2 31.4;
5 -3.7 -4 -18.7;
6 8.2 12.1 54.1;
7 -5.4 -3.1 -16.7 ];
Nv=size(data,1); % number of training patterns (rows); size(...,1) is safe even for wide matrices, unlike length() which returns the max dimension
N=2; % number of inputs xn
M=1; % number of outputs tn
c=[0;0];r=[0,0;0,0];Et=0;
for p=1:Nv
xp=data(p,1+1:1+N);      % inputs [x1 x2] of pattern p (column 1 is the pattern index)
tp=data(p,1+N+1:1+N+M);  % target t of pattern p
c(1)=c(1)+xp(1)*tp; % cross correlation vector c = E[x*t]
c(2)=c(2)+xp(2)*tp;
r(1,1)=r(1,1)+xp(1)*xp(1); % auto correlation matrix R = E[x*x']
r(2,2)=r(2,2)+xp(2)*xp(2);
r(1,2)=r(1,2)+xp(1)*xp(2);
r(2,1)=r(1,2); % R is symmetric: mirror instead of recomputing
Et=Et+tp*tp; % target energy E[t^2]
end
c=c/Nv; r=r/Nv; Et=Et/Nv; % convert sums to averages
c
r
disp('========================');
disp('[r]*{w}=[c]')
disp('{w}= r\c')
disp('w1=(c1*r22)/(r11*r22 - r12^2) - (c2*r12)/(r11*r22 - r12^2)');
disp('w2=(c2*r11)/(r11*r22 - r12^2) - (c1*r12)/(r11*r22 - r12^2)');
w=r\c; % solve the normal equations R*w = c (Cramer's-rule form shown above)
w
%%
% Part II: Iterative solution with optimal learning factor B2.
% Steepest descent on E(w) = Et - 2*w'*c + w'*r*w using the exact
% line-search step B2 = (g'*(r*w - c)) / (g'*r*g) along -g, accumulated
% element by element with the correlations from Part I.
disp('Part II: Iterative solution with optimal learning factor B2');
g=[0;0]; % gradient vector
w=[0;0]; % weight vector, started at the origin
E=0;
Nit=6; % number of Iterations
disp('it   E           w(1)        w(2)        g(1)        g(2)');
for iter=1:Nit
    % gradient components: dE/dw(i) = -2*( c(i) - sum_j w(j)*r(i,j) )
    g(1)=-2*( c(1) -w(1)*r(1,1) -w(2)*r(1,2) );
    g(2)=-2*( c(2) -w(1)*r(2,1) -w(2)*r(2,2) );
    % accumulate numerator and denominator of the optimal B2 term by term
    numB=0; denB=0;
    for i=1:2
        numB=numB -g(i)*c(i);
        for j=1:2
            numB=numB +g(i)*w(j)*r(i,j);
            denB=denB +g(i)*g(j)*r(i,j);
        end
    end
    if (denB==0)
        B2=0; % degenerate direction: stay put rather than divide by zero
    else
        B2=numB/denB; % optimal learning factor
    end
    % mean squared error at the current weights
    E= Et + w(1)*w(1)*r(1,1) + w(2)*w(2)*r(2,2)...
        -2*w(1)*c(1) - 2*w(2)*c(2) + 2*w(1)*w(2)*r(1,2);
    fprintf('%d    %f    %f    %f    %f    %f\n',iter,E,w(1),w(2),g(1),g(2));
    w=w-B2*g; % step down the gradient with the optimal factor
end










