% Backpropagation NN training script: clear the workspace and load the
% training data matrix `a` (one row per sample; 9 TB inputs + 1 PR target).
clear all;  clc;

%no_set=input('How many data set you want (between 1 to 1000) b(2)= ');

%a=load('tb_pr_17may_land_1.m');
%f1=fopen('tb_pr_com_sea_all.m','r');
%f1=fopen('tb_pr_14march.m','r');
%f1=fopen('tb_pr_sea_cls2.txt','r');
%a=load('14march_25aprl.m');
%a=fscanf(f1,'%f %f',[10 3035]);

%a=load('TB_PR_SEA.m');
%a=load('tb_pr_25_27aprl_13may.m');
%a=load('cls1_25_27_test.m');
a=load('tb_pr_25_27aprl_nsr.m');
%a=load('tb_pr_14march_nsr.m');
a=load('tb_pr_21june_nsr.m');
% NOTE(review): the two uncommented loads above are immediately overwritten;
% only the load below (tb_pr_17may_nsr.m) supplies the data actually used.
% Comment out the unused ones to avoid the wasted I/O (and a hard error if
% those files are missing).
a=load('tb_pr_17may_nsr.m');

% Reshape the data so that columns are samples: rows 1-9 are the inputs
% (brightness temperatures), row 10 is the target (PR).
[A,B]=size(a);   % A = number of samples (rows in the loaded file)
a=a';
p=a(1:9,:);  % INPUT: 9 x A %

t=a(10,:);   % TARGET: 1 x A %

in=input('No. of inputs in= ');

b=size(p);
if in>b(1)
    % BUG FIX: the original used `break`, which is illegal outside a
    % for/while loop in MATLAB, so the script never aborted here.
    % error() stops the script and prints the message instead.
    error(' Your Input feature No. is wrong. Please Try again');
end;
h=input('No. of hidden nodes h= ');
n=input('No. of iterations n= ');

% Time axis for the final plot: one value per sample.
f3=fopen('Time15000.m','r');
tm=fscanf(f3,'%f %f',[1,A]);

% Weight/attenuation initialisation.
% whi: hidden-from-input weights (h x in); woh: output-from-hidden (1 x h).
% NOTE(review): `randnr` is presumably a project-local random initialiser
% (not the builtin `randn`) -- confirm it is on the path.
% att starts at -5.5 for every input, giving F_att = sigmoid(-5.5) ~ 0.004.
whi=randnr(h,in); woh=randnr(1,h); att(1:in,:)=-5.5;
%load ('whi.txt'); load ('woh.txt'); load ('att.txt');

F_att=1./(1+exp(-att));     % per-input attenuation factors in (0,1)
tb=max(p');                 % per-row input maxima (used nowhere below)
pr=max(t');                 % target maximum (used for denormalisation later)
    
         minp = min(p')';   % per-feature minima (in x 1)
         maxp = max(p')';   % per-feature maxima (in x 1)
        
[R,Q]=size(p);
oneQ = ones(1,Q);           % replicates the column vectors across samples

% Min-max normalisation of inputs to [0,1].
% NOTE(review): divides by zero if any input row is constant -- verify the
% data never has maxp == minp for a feature.
tbn = (p-minp*oneQ)./((maxp-minp)*oneQ); 
        mint = min(t')';
        maxt = max(t')';
        
% Min-max normalisation of the target to [0,1].
prn = (t-mint*oneQ)./((maxt-mint)*oneQ); 

% Training hyperparameters:
%   lr -- learning rate for the weight updates
%   m  -- second coefficient added to the weight updates (labelled
%         "momentum factor", but applied to the current gradient below)
%   mu -- learning rate for the attenuation updates
%   m1 -- second coefficient for the attenuation updates
% The commented sets are alternative values tried during tuning.

  lr=0.0035;          m=0.07;
  mu=0.0014;          m1=0.05;
 % lr=0.0000427535;        m=0.000257327;
 % mu=0.0000244214;       m1=0.000134105;
  
  
 % lr=0.000082491;          m=0.00045812849;
 % mu=0.000091282;         m1=0.00012581638;


% lr=0.0000552;          m=0.000081249;
%  mu=0.0000753;          m1=0.006138;

% Main training loop: n epochs of sequential (online) gradient descent over
% all A samples, updating weights, then the attenuation vector, per sample.
for i=1:n 
     
    %LEARNING THE DATA SET%
    
    for j=1:A % loop over samples %
        % Forward pass: attenuated inputs -> hidden sigmoid -> output sigmoid.
        Vh=whi*[tbn(:,j).*F_att];
        phi_h=1./(1+exp(-Vh));
        Vo=woh*phi_h;
        phi_o=1./(1+exp(-Vo));
        
        e=prn(:,j)-phi_o;  % Error at output (scalar: one output node)%

        % GRADIENT DESCENT %
    
        %**del_o=- (d(epsilon)/de)* (de/dphi_o)*(d(phi_o)/dVo)**%
        del_o= e* (-1) * phi_o*(1-phi_o); % local gradient, output layer %      
        
        % **del_h=phi_h*(1-(phi_h))*woh*del_o** % 
        % NOTE(review): phi_h*(1-phi_h)' is an h x h OUTER product, not the
        % elementwise phi_h.*(1-phi_h) of textbook backprop -- confirm this
        % is intentional before touching it.
        del_h=[phi_h*(1-phi_h)']*[woh'*del_o]; %local gradient, hidden layer%
        
         %  WEIGHT CORRECTION %
        % NOTE(review): the "momentum" terms (m, m1) multiply the CURRENT
        % gradient, not the previous weight change, so dw_o is effectively
        % (lr+m)*gradient. dw_h mixes two different input terms: the lr part
        % uses tbn(:,j) but the m part uses tbn(:,j).*F_att -- verify.
        dw_o=lr*del_o*(phi_h)';
        dw_o=dw_o + m*del_o*(phi_h)';
         
        dw_h=lr*del_h*(tbn(:,j))';
        dw_h=dw_h + m* del_h*[tbn(:,j).*F_att]';
         
        % WEIGHT UPDATION %
        new_woh=woh - dw_o; % For output layer %
         
        new_whi=whi - dw_h; % For hidden layer%
         
                    
           %ATTENUATION CORRECTION%
        
        datt=mu*[whi'*del_h]*[tbn(:,j).*F_att]'*(1-F_att);        
         %  9x1 Matrix      %  %   1x9 Matrix   % %  9x1  % 
        datt=datt + m1*[whi'*del_h].*[tbn(:,j).*(F_att.*(1-F_att))];
        
               
           %ATTENUATION UPDATION%
        new_att=att - datt;
        att=new_att;
        F_att=1./(1+exp(-att)); %9x1 vector of updated attenuation factors%

        woh=new_woh; % Assigning New weight with woh & whi%
        whi=new_whi;  % Now in  next loop this woh & whi  
                        % value will be counted

        tn_o(j,:)=phi_o;   % store normalised output for this sample
        sq_err(j,:)=e^2;   % squared error (e is scalar, so ^ is fine here)
        
    end; 

    tn_o;   % THIS IS THE OUTPUT FOR 1ST ITERATION%  
    
    sq_err ;
    mse=mean(sq_err);     %%%sse=sum(sq_err)
    mse_tr(i,:)=mse;      %%%sse_tr(i,:)=sse;
    epoch(i,:)=i;
    disp('No of iteration= '); disp(i);    
    disp('Mean square error= '); disp(mse_tr(i,:));
    disp('Attenuation= ');  disp(F_att);  
    
    
end;

% ---- Denormalise the trained output and plot the results ----
mse_tr;
tn_o_tr=tn_o; % final network output after n iterations (A x 1, in [0,1])

% BUG FIX: the targets were normalised as (t-mint)/(maxt-mint) above, so
% the correct inverse mapping is tn_o*(maxt-mint)+mint.  The original
% `t_o=tn_o_tr*pr` (pr = max(t)) is only equivalent when min(t) == 0.
t_o=tn_o_tr*(maxt-mint)+mint;
  
figure(6)
plot(epoch,mse_tr,'r')      % training MSE vs. epoch

figure(7)
%subplot(3,1,1)
plot(p,t,'or',p,t_o,'+b')   % targets (red o) vs. network output (blue +)
title('Training Set')
    
figure(8)
plot(tm,t_o,'r',tm,t)       % time series: output (red) vs. target

fclose('all');
 
 
 
 
 