% Load 2001-2011 ic Deltah and JULIA data
% The drifts derived from the PIU-JIC delta H are as good as the
% JULIA drift data. The delta H has better coverage.
% Since the delta H has more coverage, use delta H data wherever available
% June 17, 2011

if ispc == 1,
    % Windows paths to the pre-computed drift data sets
    load E:\projects\ace_tensor\piu_jic_deltah\Jic_drift_2001_2011;
    load E:\projects\ace_tensor\juliadata\Julia_2001_2011;
    
end;

if isunix == 1,
    % Unix paths to the same data sets
    load([ '/data/backup/mnair/ace_tensor/piu_jic_deltah/Jic_drift_2001_2011.mat']);
    load([ '/data/backup/mnair/ace_tensor/juliadata/Julia_2001_2011.mat']);
end;

% Pre-allocate the per-day day-number vectors. Column 100 is only used as a
% template to get the right length; every element is overwritten by the
% loops below.
jicfday = floor(Jic_delh_fday_n(:,100));
julfday = floor(Julia_fday_n(:,100));

% For each day (row) of the Jicamarca delta-H time array, record the first
% non-NaN floored day number; rows with no valid samples get NaN.
for i = 1:size(Jic_delh_fday_n,1),  % size(...,1): length() returns the LARGER dimension of a matrix
    
    dummy = floor(Jic_delh_fday_n(i,:));
    
    K = dummy(~isnan(dummy));
    
    % BUG FIX: original tested any(K), which is false for an all-zero K and
    % would also miss a legitimate day value of 0; we only care whether any
    % non-NaN sample exists at all.
    if ~isempty(K)
        
        jicfday(i) = K(1);
        
    else
        
        jicfday(i) = NaN;
    end;
end;


% Same per-day extraction for the JULIA time array: first non-NaN floored
% day number per row, NaN when the whole row is missing.
for i = 1:size(Julia_fday_n,1),  % size(...,1): length() returns the LARGER dimension of a matrix
    
    dummy = floor(Julia_fday_n(i,:));
    
    K = dummy(~isnan(dummy));
    
    % BUG FIX: ~isempty(K) replaces any(K); see matching loop above.
    if ~isempty(K)
        
        julfday(i) = K(1);
        
    else
        
        julfday(i) = NaN;
    end;
end;

% Plot the JULIA and DeltaH common day data
% load E:\projects\geomag\indices\aplist.mat;
% [c,ia,ib] = intersect(jicfday,julfday);
%
% for i = 1:800,
% plot(Jic_delh_fday_n(ia(i),:),Jic_delh_W_n(ia(i),:)); hold on;
% plot(Julia_fday_n(ib(i),:),Julia_W_n(ib(i),:),'r');
% hold off;
%
% this_fday = jicfday(ia(i));
%
% ap_day = find(fday_ap==this_fday);
% this_ap = mean ( ap (ap_day:ap_day + 8 ) );
% title(sprintf('%s %2.0f',datestr(this_fday), this_ap));
%
% pause;
%
% end;

%%  Combine JULIA and deltah data

% Re-load the raw data sets (they are clobbered/cleared further below).
if ispc == 1,
    
    load E:\projects\ace_tensor\piu_jic_deltah\Jic_drift_2001_2011;
    load E:\projects\ace_tensor\juliadata\Julia_2001_2011;
    
    
end;

if isunix == 1,
    
    load(['/data/backup/mnair/ace_tensor/piu_jic_deltah/Jic_drift_2001_2011']);
    load(['/data/backup/mnair/ace_tensor/juliadata/Julia_2001_2011']);
    
end;

% Select the option below to control the sources of the data for the
% transfer function analysis !!


selection = 5;  % 1 - Combined, Jic preferred. 2 Combined Julia Preferred
% 3 - JULIA only               4 Jic Delta H only
% 5 Combine JULIA and DeltaH * Note * There will be
% repeated dates
% Alken delh EEF

% Build Drift_Comb_W / Drift_Comb_fday / Drift_Comb_fday_vec from the two
% sources according to `selection`. Variable names are load-bearing: they
% are cleared by name after this block, so do not rename.
if selection == 1,
    
    %Make Combined data set. If JULIA data and deltaH are available
    %for the same day, JULIA data are omitted
    
    % ia indexes days present in JULIA but absent from the delta-H set
    [c,ia] = setdiff(julfday,jicfday);
    
    Selected_Julia_W = Julia_W_n(ia,:);
    Selected_Julia_fday = Julia_fday_n(ia,:);
    Selected_julfday = julfday(ia);
    
    Drift_Comb_W = [Jic_delh_W_n;Selected_Julia_W];
    Drift_Comb_fday = [Jic_delh_fday_n;Selected_Julia_fday];
    Drift_Comb_fday_vec = [jicfday;Selected_julfday];
    
    % sort the combined set chronologically (NaN day numbers sort to the end)
    [y,i] = sort(Drift_Comb_fday_vec);
    
    Drift_Comb_W = Drift_Comb_W(i,:);
    Drift_Comb_fday = Drift_Comb_fday(i,:);
    Drift_Comb_fday_vec = y;
    
elseif selection == 2,
    
    %Make Combined data set. If JULIA data and deltaH are available
    %for the same day, deltaH data are omitted
    
    % ia indexes days present in delta-H but absent from JULIA
    [c,ia] = setdiff(jicfday,julfday);
    
    Selected_Julia_W = Jic_delh_W_n(ia,:); % Selected_Julia_W is really Selected_Jic_W
    Selected_Julia_fday = Jic_delh_fday_n(ia,:);
    Selected_julfday = jicfday(ia);
    
    Drift_Comb_W = [Julia_W_n;Selected_Julia_W];
    Drift_Comb_fday = [Julia_fday_n;Selected_Julia_fday];
    Drift_Comb_fday_vec = [julfday;Selected_julfday];
    
    [y,i] = sort(Drift_Comb_fday_vec);
    
    Drift_Comb_W = Drift_Comb_W(i,:);
    Drift_Comb_fday = Drift_Comb_fday(i,:);
    Drift_Comb_fday_vec = y;
    
elseif selection == 3,
    
    % Only JULIA data
    
    Drift_Comb_W = Julia_W_n;
    Drift_Comb_fday = Julia_fday_n;
    Drift_Comb_fday_vec = julfday;
    
elseif selection == 4,
    
    % Only deltaH data
    
    Drift_Comb_W = Jic_delh_W_n;
    Drift_Comb_fday = Jic_delh_fday_n;
    Drift_Comb_fday_vec = jicfday;
    
elseif selection ==5,
    
    % Straight concatenation; days present in both sources appear twice
    % (no de-duplication, no sorting).
    Drift_Comb_W = [Julia_W_n ; Jic_delh_W_n];
    Drift_Comb_fday = [Julia_fday_n ; Jic_delh_fday_n];
    Drift_Comb_fday_vec = [julfday ; jicfday];
    
end;

% Drop everything except the combined drift arrays, then save the entire
% remaining workspace to the combined-data file.
clear Jic_delh_W_n Jic_delh_fday_n Julia_W_n Julia_fday_n K Selected_Julia_W Selected_Julia_fday Selected_julfday a ans c dummy i ia ib jicfday julfday y selection;

if ispc == 1,
    
    save E:\projects\ace_tensor\Drift_Data_Combined_2000_2011
end;

if isunix == 1,
    
    % save with no variable list: whole workspace goes to the .mat file
    save( [ '/data/backup/mnair/ace_tensor/Drift_Data_Combined_2000_2011.mat']);
    
end;


%% Load the drift & ace data, find common places where data are available
% make a time series

if ispc == 1,
    load E:\projects\ace_tensor\acedata\ace_2000_2010.mat;
    load E:\projects\ace_tensor\Drift_Data_Combined_2000_2011;
    load E:\projects\geomag\indices\aplist.mat;
end;

if isunix == 1,
    load([ '/data/backup/mnair/ace_tensor/acedata/ace_2000_2010.mat']);
    load([ '/data/backup/mnair/ace_tensor/Drift_Data_Combined_2000_2011.mat']);
    load([ '/data/backup/mnair/geomag/indices/aplist.mat']);
end;


if exist('TIME_SEG','var') && exist('JULI_SEG','var') && exist('ACE_SEG','var'), % remove existing variables
    clear TIME_SEG JULI_SEG ACE_SEG;
end;

%w=w_climate; %This copies the Julia drift (measured)-Julia drift (modeled) to w
% ace_all(:,1) assumed to hold ACE time stamps -- TODO confirm against the .mat file
ace_fday = floor(ace_all(:,1));
ap_lower_limit = 20; % lower limit of Ap: only disturbed days are processed below
% solar_flux_limit = 0;
N_seg = 1;%the increasing counter for array JULI_SEG & ACE_SEG
N_data = 1;
phase_delay = 17;%minutes
mjd_date = datenum(2000,1,1);
plc = 'b';%plot color


seg_length = 72; % 72 - JULIA, 100-120 deltah
fft_length = 72; % for mtprocn, the seg lengths should be 2^

w = Drift_Comb_W;

% column indices of the drift matrix (132 five-minute samples per day)
gh = [1:132];

% Build per-day index structure: Julia_W(i).k lists the non-NaN columns of
% row i, Julia_W(i).fday its day number.
% BUG FIX: loop bound changed from length(w) to size(w,1) to match the loop
% that consumes Julia_W below; length() would return the column count if
% there were fewer than 132 days.
for i = 1:size(w,1),
    L = isnan(w(i,:));
    Julia_W(i).k = gh(~L);
    Julia_W(i).fday = Drift_Comb_fday_vec(i);
end;

fday = Drift_Comb_fday;


% Main pairing loop: for each day with a long enough contiguous drift
% segment, fetch the matching ACE data, interpolate it onto the drift time
% base, and (for disturbed days, mean Ap >= ap_lower_limit) append fixed
% length segments to EEF_SEG / IEF_*_SEG and rows to the *_MAT matrices.
for i = 1: size(w,1),
    % contiguous columns (diff==1 everywhere), long enough, and not too many NaNs
    if sum(diff(Julia_W(i).k))+1 == length(Julia_W(i).k) && sum(diff(Julia_W(i).k))+1 >= seg_length &&...
            sum(isnan(w(i,:))) <= 60,
        
        L = ace_fday == Julia_W(i).fday; %select the ace data for the current JULIA fday
        
        if sum(L) > 0, % If there is ACE data
            % shift ACE time by the assumed propagation/phase delay (minutes)
            ace_time = ace_all(L,1)+(phase_delay)/(60*24);%Try to advance 60 VS model/ ace time minutes
            
            ace_ief_ey = ace_all(L,2);
            ace_ief_ez = ace_all(L,3);
            
            %     imf_bz_data = ace_gse_bz(L);
            %     mean_imf_bz = nanmean(ace_gse_bz(L));
            %     mean_sw= nanmean(sw(L,1));
            
            L = isnan(ace_ief_ey) | isnan(ace_ief_ez);
            
            % fill small ACE gaps (< 10 samples) by interpolation; larger
            % gaps are left so the NaN check below rejects the day
            if sum(L) < 10 && sum(L) ~= 0,% note that L < 10 is the normal set up which produces 265 data pairs
                %however, large gap interpolation results in steeper phases towards
                %lower periods. To alleviate this use less interpolation
                
                ace_ief_ey = interp1(ace_time(~L),ace_ief_ey(~L),ace_time);
                ace_ief_ez = interp1(ace_time(~L),ace_ief_ez(~L),ace_time);
                
            end;
            %
            
            %  fprintf('Number of missing points on fday %d = %d\n', Julia_W(i).fday, sum(L));
            %if abs(mean(diff(ace_time))-0.0028) <= 2.7778e-005, % Use this with Burke data
            % accept only uniformly sampled ACE data (~5-min cadence, 0.0035 day)
            if abs(mean(diff(ace_time))-0.0035) <= 1e-004, %Use this with ACE min averages
                JULI = w(i,Julia_W(i).k)-nanmean(w(i,Julia_W(i).k));
                ACEEY = interp1(ace_time, ace_ief_ey, fday(i,Julia_W(i).k)); %here  fday - 2/24 was used to find delayed coherence response.
                ACEEZ = interp1(ace_time, ace_ief_ez, fday(i,Julia_W(i).k)); %here  fday - 2/24 was used to find delayed coherence response.
                
                % mean Ap over the drift segment's time span
                L = fday_ap >= fday(i,Julia_W(i).k(1)) & fday_ap <= fday(i,Julia_W(i).k(end));
                mean_ap = mean(ap(L));
                
                if ~any(isnan(ACEEY)) &&  ~any(isnan(ACEEZ)) && mean_ap >= ap_lower_limit,
                    
                    % append seg_length samples to the 1-D concatenated segments
                    EEF_SEG(N_seg:N_seg+seg_length-1) = JULI(1:seg_length);
                    IEF_EY_SEG(N_seg:N_seg+seg_length-1) = ACEEY(1:seg_length);
                    IEF_EZ_SEG(N_seg:N_seg+seg_length-1) = ACEEZ(1:seg_length);
                    TIME_SEG(N_seg:N_seg+seg_length-1) = fday(i,Julia_W(i).k(1:seg_length));
                    
                    % truncate to fft_length before storing matrix rows
                    if length(JULI) > fft_length,
                        JULI(fft_length+1:end) = [];
                        ACEEY(fft_length+1:end) = [];
                        ACEEZ(fft_length+1:end) = [];
                    end;
                    
                    % zero-pad (if shorter than fft_length) and store one row per day
                    EEF_MAT(N_data,:)    = [JULI zeros([1,fft_length-length(JULI)])];
                    IEF_EY_MAT(N_data,:) = [ACEEY zeros([1,fft_length-length(ACEEY)])];
                    IEF_EZ_MAT(N_data,:) = [ACEEZ zeros([1,fft_length-length(ACEEZ)])];
                    
                    
                    
                    N_seg = N_seg+seg_length;
                    N_data = N_data+1;
                    
                    
                end;
            end;
            
        else
            
            fprintf('ACE data is missing for the day %s\n', datestr(Julia_W(i).fday));
            
        end;
    end;
end;

% Save the segment arrays/matrices for the transfer-function analysis.
if ispc == 1,
    save E:\projects\ace_tensor\Electric_Field_Matrix seg_length EEF_SEG IEF_EY_SEG IEF_EZ_SEG TIME_SEG IEF_EY_MAT EEF_MAT IEF_EZ_MAT;
end;

if isunix == 1,
    
    % BUG FIX: the original passed one long string (path + variable names),
    % so MATLAB treated the whole thing as a single file name and the
    % variable list was never honored. Use the functional form with the
    % variable names as separate arguments.
    save('/data/backup/mnair/ace_tensor/Electric_Field_Matrix', ...
        'seg_length','EEF_SEG','IEF_EY_SEG','IEF_EZ_SEG','TIME_SEG', ...
        'IEF_EY_MAT','EEF_MAT','IEF_EZ_MAT');
    
end;

%% Coherence, phase and transfer function

if ispc == 1,
    load E:\projects\ace_tensor\Electric_Field_Matrix seg_length EEF_SEG IEF_EY_SEG IEF_EZ_SEG TIME_SEG;
end;

if isunix == 1,
    
    load '/data/backup/mnair/ace_tensor/Electric_Field_Matrix' seg_length EEF_SEG IEF_EY_SEG IEF_EZ_SEG TIME_SEG;
end;


% scale drift to electric field in mV/m (24.366 conversion factor -- presumably
% drift-to-EEF at Jicamarca; confirm against the data documentation)
EEF_SEG = EEF_SEG.*24.366*1e-3; %mV/m

[Cxx_IEF_Ey,F] = mscohere(EEF_SEG,IEF_EY_SEG,hanning(seg_length),0,seg_length,1/(5*60)); %1/(5*60) = sampling frequency in Hz (5*60 = 300 seconds)
[Cxx_IEF_Ez,F] = mscohere(EEF_SEG,IEF_EZ_SEG,hanning(seg_length),0,seg_length,1/(5*60)); %1/(5*60) = sampling frequency in Hz (5*60 = 300 seconds)

[Pxy,F] = cpsd(IEF_EY_SEG,EEF_SEG,hanning(seg_length),0,seg_length,1/(5*60)); %
phase = angle(Pxy);
[Tx,F] = tfestimate(IEF_EY_SEG,EEF_SEG,hanning(seg_length),0,seg_length,1/(5*60));
[Ty,F] = tfestimate(IEF_EZ_SEG,EEF_SEG,hanning(seg_length),0,seg_length,1/(5*60));
% BUG FIX: original read conj(Txy') but no Txy exists in this section (only
% Tx and Ty are estimated above). Tx (IEF Ey -> EEF) appears intended, by
% analogy with the per-UT section later in this file -- confirm.
tf = conj(Tx');

%% plotting

% Coherence vs period, log x-axis; only bins whose Ey coherence exceeds the
% 99% significance level are drawn (Ey in red, Ez in blue).
fig_coh = figure;
axes('Parent',fig_coh,'XTick',[0.1 1 10],...
    'XScale','log',...
    'XMinorTick','on');
set(gca,'FontSize',16);
box('on');
hold('all');
xlabel('period in hours');
ylabel('coherence');
hold on;

%confidence level of coherence
%ref Thompson, R.O., 1979: Coherence Significance Levels. J. Atmos. Sci.,
%36, 2020-2021.

conf_level = 0.99;
coh_ci = 1 - (1 - conf_level)^(1/(N_data - 1));
ax_lim = axis;
ci_line = line([ax_lim(1) ax_lim(2)], [coh_ci, coh_ci], 'LineStyle', '-.', 'color', plc);

% indices found over Cxx_IEF_Ey(2:end) but applied to the full arrays,
% exactly as in the original implementation
sig_idx = find(Cxx_IEF_Ey(2:end) > coh_ci);
periods_hr = 1./(3600*F(sig_idx));
semilogx(periods_hr, Cxx_IEF_Ey(sig_idx), 'r', 'LineWidth', 2);
semilogx(periods_hr, Cxx_IEF_Ez(sig_idx), 'b', 'LineWidth', 2);
axis([0.1,10,0,1]);

%set(gcf,'position',[1002 484 784 583]);

% mixed minute/hour tick labelling along the period axis
set(gca,'XTick',[6/60,10/60,20/60,30/60,1,2,4,6,10]);
set(gca,'XTickLabel',[' 6';'10';'20';'30';' 1';' 2';' 4';' 6';'10']);
text(6/60,-0.08,'|<-','FontSize',16);
text(8,-0.08,'->|','FontSize',16);
text(40/60,-0.08,'-> | <-','FontSize',16);
text(10/60,-0.08,'Period in minutes','FontSize',16);
text(2,-0.08,'Period in hours','FontSize',16);

%% TF - MATLAB -%% tf

% Transfer-function magnitude (dB) vs period for the Ey (red) and Ez (blue)
% estimates, on the same log period axis as the coherence plot.
fig_tf = figure;
%set(fig_tf,'Position',[680   788   349   288]);

axes('Parent',fig_tf,'XTick',[0.1 1 10],...
    'XScale','log',...
    'XMinorTick','on');
set(gca,'FontSize',16);
box('on');
hold('all');
xlabel('period in hours');
ylabel('Magnitude (dB)');
hold on;

Xmag1 = abs(Tx);          % spectral magnitude, Ey channel
Xmag2 = abs(Ty);          % spectral magnitude, Ez channel
Xdb1 = 20*log10(Xmag1);   % magnitudes in dB
Xdb2 = 20*log10(Xmag2);

% (peak normalisation / clipping from an earlier version intentionally
% left out, as in the original)

period_hr = 1./(3600*F(2:end));  % skip the zero-frequency bin
semilogx(period_hr, Xdb1(2:end), 'r', 'LineWidth', 2);
semilogx(period_hr, Xdb2(2:end), 'b', 'LineWidth', 2);
%set(gcf,'position',[1002 484 784 583]);
set(gca,'XTick',[6/60,10/60,20/60,30/60,1,2,4,6,10]);
set(gca,'XTickLabel',[' 6';'10';'20';'30';' 1';' 2';' 4';' 6';'10']);
text(6/60,-0.08,'|<-','FontSize',16);
text(8,-0.08,'->|','FontSize',16);
text(40/60,-0.08,'-> | <-','FontSize',16);
text(1,-0.08,'Period in minutes','FontSize',16);
text(2,-0.08,'Period in hours','FontSize',16);

axis([0.1 10 -55 -15 ]);

%% LT dependency
%

if ~exist('ace_all','var'),
    
    if ispc == 1,
        
        load E:\projects\ace_tensor\acedata\ace_2000_2010.mat ace_all;
        ace_all(:,1) = ace_all(:,1)+(17)/(60*24);%Try to advance 60 VS model/ ace time minutes
        load('C:\Manoj\projects\ace\jcamarca_isr_day_night.mat');
        
    end;
    if isunix == 1,
        load /data/backup/mnair/ace_tensor/acedata/ace_2000_2010.mat ace_all;;
        ace_all(:,1) = ace_all(:,1)+(17)/(60*24);%Try to advance 60 VS model/ ace time minutes
        load '/data/backup/mnair/ace/jcamarca_isr_day_night.mat';
    end;
    
end;

options.Eeff_cut_off = 8; % The cut off amplitude for IEF .LUHR & MAUS EPS 2010 used a value of 8
options.ace_interp_method = 'spline'; %FIXED spline
options.des_int = 0.25 ; % desired sampling interval in hours (15 min)

% Find data for each hour in a day
% the idea is to find cross correlation coefficient between the
% IEF Ey segments and the EEF data pieces centered around each LT hour.
% Hope to see the +ve day and -ve night correlation
% Use the cc and some division / scalinf factor to give LT dependency to
% the TF


%% remove large values in ACE data

% Soft-saturate |IEF Ey| at Eeff_cut_off while preserving sign:
% Eeff = c*|x|/sqrt(c^2 + x^2) tends to c for large |x|.
L = ace_all(:,2) < 0;
temp = abs(ace_all(:,2));
Eeff = options.Eeff_cut_off * temp ./ sqrt (options.Eeff_cut_off^2 + temp.^2);
ace_all(:,2) = Eeff;
ace_all(L,2) = ace_all(L,2) * -1;

% interpolate and down sample
% Low-pass the series: fill NaNs by spline interpolation, resample from
% 5-min to 15-min cadence (factor 3), then interpolate back onto the
% original time stamps.
L = isnan(ace_all(:,2));
y = interp1(ace_all(~L,1),ace_all(~L,2),ace_all(:,1),options.ace_interp_method);
ace_down = resample(y,1,options.des_int*60/5); % Checked the resampling time axis OK
ace_inter = interp1(ace_all(1:options.des_int*60/5:end,1),ace_down,ace_all(:,1), options.ace_interp_method);
ace_all(:,2) = ace_inter;

%%
% Per-UT-bin correlation between the drift data and the (smoothed) ACE IEF:
% for each 2-hour UT bin, correlate each day's piece of drift with ACE data
% interpolated onto the same times, and accumulate correlation statistics.
n_ut = 1;
ut_interval = 2;%hours
min_piece_length = ( ut_interval * 60 / 5 ) -  1;

% accumulators, one slot per UT bin
datacount = zeros([1, 24 / ut_interval]);
correlation_ut = zeros([1,24 / ut_interval]);
correlation_sig = zeros([1,24 / ut_interval]);
correlation_rlo = zeros([1,24 / ut_interval]);
correlation_rup = zeros([1,24 / ut_interval]);
data_ratio = zeros([1,24 / ut_interval]);
ratio_count = zeros([1,24 / ut_interval]);




for i = 0 : ut_interval: ( 24 - ut_interval),
    
    % samples whose time-of-day falls inside the current UT bin
    L = (fday - floor(fday) >= (i/24)) & (fday - floor(fday) < ( (i + ut_interval) /24));
    
    this_ut_fday = fday(L);
    this_ut_drift = drift(L); % `drift` is expected from a loaded .mat -- TODO confirm
    
    K = floor(this_ut_fday);
    A = unique(K);
    
    
    
    for j = 1 : length(A),
        
        % one day's worth of samples inside this UT bin
        L = floor(this_ut_fday) == A(j);
        
        piece_of_drift =  this_ut_drift(L);
        piece_of_fday =  this_ut_fday(L);
        
        %get ace data for this piece of data
        % 10-minute margin on either side so interp1 has support at the edges
        L = ace_all(:,1) >= ( piece_of_fday(1) - 10/(60*24) ) & ...
            ace_all(:,1) <= ( piece_of_fday(end) + 10/(60*24) );
        
        ace_time = ace_all(L,1);
        ace_data = ace_all(L,2);
        
        piece_of_ace = interp1(ace_time, ace_data, piece_of_fday);
        
        if ~any(isnan(piece_of_ace))   && ~any(isnan(piece_of_drift)) && ...
                length(piece_of_ace)   >=  min_piece_length  && ...
                length(piece_of_drift) >=  min_piece_length,
            
            %[R,P,RLO,RUP]=corrcoef(...) also returns matrices RLO and RUP,
            %of the same size as R, containing lower and upper bounds for a 95%
            %confidence interval for each coefficient.
            
            [a,p,rlo,rup] = corrcoef(piece_of_ace, piece_of_drift);
            datacount(n_ut) = datacount(n_ut )  + 1;
            correlation_ut(n_ut) = correlation_ut(n_ut) + a(2,1);
            correlation_sig(n_ut) = correlation_sig(n_ut) + p(2,1);
            correlation_rlo(n_ut) = correlation_rlo(n_ut) + rlo(2,1);
            correlation_rup(n_ut) = correlation_rup(n_ut) + rup(2,1);
            
            % amplitude ratio drift/ACE, skipped when ACE has exact zeros
            if ~any(piece_of_ace == 0),
                %data_ratio(n_ut) = data_ratio(n_ut) + sum((piece_of_drift./piece_of_ace).^2);
                data_ratio(n_ut) = data_ratio(n_ut) + sum(abs(piece_of_drift))/sum(abs(piece_of_ace));
                ratio_count(n_ut) = ratio_count(n_ut) + 1;
            end;
            
            
        end;
        
    end;
    
    n_ut = n_ut + 1;
end;

%% plot the correlation

% Mean correlation per UT bin, plotted at the bin centres with the x-axis
% relabelled in local time (5-hour shift -- presumably Jicamarca UT->LT;
% confirm).
bin_centers = ut_interval/2 : ut_interval : 24;

plot(bin_centers, correlation_ut./datacount, 'b*-');

tick_ut = 0:2:23;
set(gca,'XTick', tick_ut);

% shift UT ticks to LT and wrap non-positive values into 1..24
tick_lt = tick_ut - 5;
tick_lt(tick_lt <= 0) = tick_lt(tick_lt <= 0) + 24;

% sprintf/reshape builds a 2-character label per tick
set(gca,'XTickLabel', reshape(sprintf('%2.0f',tick_lt),[2,length(tick_lt)])');

set(gca,'FontSize',16);
xlabel('Local Time (Hours)');
ylabel('Correlation');
legend('Jicamarca ISR - ACE IEF Ey');
grid on;


%% Make transfer function centered on each LT
% Overlapping 3 hour long windows
% take the peak TF amplitude (2 hour ?)
% hanning windows to minimize the variations at > 1 hour from the center

if ~exist('ace_all','var'),
    
    if ispc == 1,
        
        load E:\projects\ace_tensor\acedata\ace_2000_2010.mat ace_all;
        ace_all(:,1) = ace_all(:,1)+(17)/(60*24);%Try to advance 60 VS model/ ace time minutes
        load('C:\Manoj\projects\ace\jcamarca_isr_day_night.mat');
        
    end;
    if isunix == 1,
        % BUG FIX: the original lines read "load [ '...'];" which is invalid
        % MATLAB syntax. Paths reconstructed from the working unix loads
        % earlier in this script (no 'projects' component) -- confirm.
        load('/data/backup/mnair/ace_tensor/acedata/ace_2000_2010.mat','ace_all');
        ace_all(:,1) = ace_all(:,1)+(17)/(60*24);%Try to advance 60 VS model/ ace time minutes
        load('/data/backup/mnair/ace/jcamarca_isr_day_night.mat');
    end;
    
end;

%ace_all(:,2) = randn([1,length(ace_all)]) .* 1.7602 + 0204; % generate random values


ut_interval = 1;%hours, width of each UT bin
ut_window_radius = 1 ; %hours either side of the bin-centre sample

%correlation stuff -- accumulators, one slot per UT bin
datacount = zeros([1, 24 / ut_interval]);
correlation_ut = zeros([1,24 / ut_interval]);
correlation_sig = zeros([1,24 / ut_interval]);
correlation_rlo = zeros([1,24 / ut_interval]);
correlation_rup = zeros([1,24 / ut_interval]);

n_ut = 1;
min_piece_length = ( ut_window_radius * 60 / 5 ) * 2 - 1;

% NaN-fill so unused trailing slots can be distinguished from genuine zero
% data and stripped later. nan() replaces the original zeros(...)*NaN idiom.
JULI_SEG = nan(24,3000);
TIME_SEG = JULI_SEG;
ACE_SEG = JULI_SEG;



% Per-UT-bin segment extraction: for each UT bin, take each day's piece of
% drift, extend it to +/- ut_window_radius hours around its median sample,
% check continuity, pair it with interpolated ACE data, accumulate
% correlation statistics and store the (de-meaned) segments row n_ut of
% JULI_SEG / ACE_SEG / TIME_SEG.
for i = 0 : ut_interval: ( 24 - ut_interval),
    
    N_seg = 1;
    
    % samples whose time-of-day falls inside the current UT bin
    L = (fday - floor(fday) >= (i/24)) & (fday - floor(fday) < ( (i + ut_interval) /24));
    
    this_ut_fday = fday(L);
    this_ut_drift = drift(L);
    
    K = floor(this_ut_fday);
    A = unique(K);
    
    
    
    for j = 1 : length(A),
        
        L = floor(this_ut_fday) == A(j);
        
        piece_of_drift =  this_ut_drift(L);
        piece_of_fday =  this_ut_fday(L);
        
        if length(piece_of_fday) >= 10, % some arbitrary number - it should be > 1
            
            % index (in the full fday vector) of the piece's median sample
            med_this_fday_index = find(fday ==  piece_of_fday(floor(length(piece_of_fday)/2) + 1));
            
            % extend to +/- ut_window_radius hours (in 5-min samples)
            st = med_this_fday_index -  ut_window_radius  * 60 / 5 ;
            en = med_this_fday_index +  ut_window_radius  * 60 / 5 ;
            
            if st > 0 && en <= length(fday),
                
                piece_of_fday_extended = fday(st:en);
                
                % 0.0034 day ~ 5 min: require a uniform 5-min cadence
                if all(floor((diff(piece_of_fday_extended))*1e4)/1e4 == 0.0034), % making sure that the data are continuous
                    
                    piece_of_drift_extended = drift(st:en);
                    
                    if any(piece_of_fday_extended == 0),
                        display('paused');
                        pause;
                    end;
                    
                    %get ace data for this piece of data
                    % 10-minute margin so interp1 has support at the edges
                    L = ace_all(:,1) >= ( piece_of_fday_extended(1) - 10/(60*24) ) & ...
                        ace_all(:,1) <= ( piece_of_fday_extended(end) + 10/(60*24) );
                    
                    ace_time = ace_all(L,1);
                    ace_data = ace_all(L,2);
                    
                    piece_of_ace = interp1(ace_time, ace_data, piece_of_fday_extended);
                    
                    if ~any(isnan(piece_of_ace))   && ~any(isnan(piece_of_drift_extended)) && ...
                            length(piece_of_ace)   >=  min_piece_length  && ...
                            length(piece_of_drift_extended) >=  min_piece_length,
                        
                        % de-mean both series before correlating/storing
                        piece_of_drift_extended = piece_of_drift_extended - mean(piece_of_drift_extended);
                        piece_of_ace = piece_of_ace - mean(piece_of_ace);
                        %[R,P,RLO,RUP]=corrcoef(...) also returns matrices RLO and RUP,
                        %of the same size as R, containing lower and upper bounds for a 95%
                        %confidence interval for each coefficient.
                        
                        [a,p,rlo,rup] = corrcoef(piece_of_ace, piece_of_drift_extended);
                        datacount(n_ut) = datacount(n_ut )  + 1;
                        correlation_ut(n_ut) = correlation_ut(n_ut) + a(2,1);
                        correlation_sig(n_ut) = correlation_sig(n_ut) + p(2,1);
                        correlation_rlo(n_ut) = correlation_rlo(n_ut) + rlo(2,1);
                        correlation_rup(n_ut) = correlation_rup(n_ut) + rup(2,1);
                        
                        JULI_SEG(n_ut,N_seg:N_seg+min_piece_length-1) = piece_of_drift_extended(1:min_piece_length);%may apply hanning windows here
                        ACE_SEG(n_ut,N_seg:N_seg+min_piece_length-1) = piece_of_ace(1:min_piece_length);
                        TIME_SEG(n_ut,N_seg:N_seg+min_piece_length-1) = piece_of_fday_extended(1:min_piece_length);
                        
                        N_seg = N_seg + min_piece_length;
                        
                        
                    end;
                    
                end;
                
            end;
        end;
    end;
    
    n_ut = n_ut + 1;
end;

% N_seg = 1;
% n_ut = 1;
%
% for i = 1 : length(ACE_SEG) / min_piece_length,
%
% plot(TIME_SEG(n_ut,N_seg:N_seg+min_piece_length-1), JULI_SEG(n_ut,N_seg:N_seg+min_piece_length-1)/40,...
%     TIME_SEG(n_ut,N_seg:N_seg+min_piece_length-1) ,ACE_SEG(n_ut,N_seg:N_seg+min_piece_length-1));
%
%     N_seg = N_seg + min_piece_length;
%     pause;
%
% end;


%% Coherence, phase and transfer function

% 1/40 scaling of the drift segments -- presumably drift-to-field conversion;
% confirm against the 24.366e-3 factor used earlier.
JULI_SEG = JULI_SEG./40;
% NOTE(review): these two whole-matrix calls are overwritten inside the loop
% below (Cxx, F reassigned row by row); with NaN-padded matrix inputs their
% results are not used. phase is also not consumed afterwards.
[Cxx,F] = mscohere(JULI_SEG,ACE_SEG,hanning(72),0,72,1/(5*60)); %1/(5*60) = sampling frequency in Hz (5*60 = 300 seconds)
[Pxy,F] = cpsd(ACE_SEG,JULI_SEG,hanning(72),0,72,1/(5*60)); %
phase = angle(Pxy);

% Per-UT-row transfer function / coherence, NaN padding stripped first.
for i = 1: n_ut -1,
    
    
    ace_data = squeeze(ACE_SEG(i,:));
    L = isnan(ace_data);
    ace_data(L) = [];
    juli_data = squeeze(JULI_SEG(i,:));
    L = isnan(juli_data);
    juli_data(L) = [];
    
    % NOTE(review): assigning rows of width nfft/2+1 (for min_piece_length)
    % into Cxx/F from the 72-point calls above will mismatch in size on the
    % first iteration unless those earlier results are cleared -- confirm.
    [Txy(i,:),F(i,:)] = tfestimate(ace_data,juli_data,hanning(min_piece_length),0,min_piece_length,1/(5*60));
    [Cxx(i,:),F(i,:)] = mscohere(ace_data,juli_data,hanning(min_piece_length),0,min_piece_length,1/(5*60));
    
end;
% conjugate transpose: one column of transfer-function estimates per UT bin
tf = conj(Txy');


%% Making an interpolated, scaled correlogram for LT correction


load /data/backup/mnair/ace_tensor/ACE_Jicamarca_TF_Correlation_utint_1_radius_1

% Scale so the mean correlation over bins 14-21 (daytime hours) becomes 1.
factor = 1/mean(correlation_ut(14:21)./datacount(14:21));

% Triple the 24-hour curve so the spline has periodic-like support on both
% sides of the central day before smoothing.
correlation_data = [correlation_ut./datacount correlation_ut./datacount correlation_ut./datacount] *  factor ;


%  factor (=1./mean(correlation(14:21)) for scaling the data.
%  the idea is the the mean power between 9-16LT should have a
%  multiplication factor 1.

% x-axis for the tripled curve, shifted by -5 hours (UT -> LT, presumably
% Jicamarca -- confirm)
local_t_xscale = [-23.5 : 1: -0.5 0.5:1:48] - 5;

%  [s] = csaps(local_t_xscale,correlation_data);
%  sn = fnxtr(s);%This methods gives superior extrapolation !
%  correlation_smoothed = ppual(sn,local_t_xscale);
%  correlation_smoothed = correlation_smoothed(25:48);

% Least-squares spline fit on 3-hour knots (the /spline(...,eye(...),...)
% construct solves for the B-spline coefficients), then evaluate and keep
% the central 24 hours.
b1 = min(local_t_xscale) :3: max(local_t_xscale);
sp=spline(b1,correlation_data/spline(b1,eye(length(b1)),local_t_xscale));
v=ppval(local_t_xscale,sp);

correlation_smoothed = v(25:48);

%% PREDICT THE JICAMARCA / JULIA FIELDS WITH TF MODEL & LT dependency
% The idea is that, the tf should be used to predict the pp disturbance field
% at any local time along any longitude sector on equator.

% 1) Use the tf function to calculate the day-time response
% 2) Use the lt response function calculate the LT response
% 3) Multiply the above two results to give the final response

% For comparing the results with the observations
% 1) Remove the quiet day variation
% 2) Make sure that the sampling interval is 5 minutes



% create JICAMARCA ISR Quiet day average

% Load the Ap index list and the Jicamarca ISR day/night data.
if ispc  == 1,
    load E:\projects\geomag\indices\aplist.mat;
    load('C:\Manoj\projects\ace\jcamarca_isr_day_night.mat');
end;

if isunix == 1,
    % BUG FIX: the original unix branch was syntactically invalid
    % ("load [ '...']" and an unbalanced quoted string). Paths reconstructed
    % from the working unix loads earlier in this script -- confirm.
    load('/data/backup/mnair/geomag/indices/aplist.mat');
    load('/data/backup/mnair/ace/jcamarca_isr_day_night.mat');
end

fday = roundn(fday,-4); % remove the small fluctuations in time (like 20:09:59 to 20:10:00 )

A = unique(floor(fday)); % get the unique days for which ISR data are available

% one row per candidate quiet day, 288 five-minute slots per day
isr_quiet_day_matrix = zeros([length(A),288])*NaN;

% canonical fractional-day grid at 5-minute cadence
fday_decimals = roundn((0: (5/(24*60)) : 1 - (5/(24*60))),-4);
N_data = 1;
% Fill the matrix with drift data from magnetically quiet days
% (mean Ap < 20), mapping each sample onto its nearest 5-minute slot.
for i = 1 : length(A),
    
    L = floor(fday) == A(i);
    
    this_fday = fday(L);
    this_drift = drift(L);
    
    % mean Ap over this day's time span
    L1 = fday_ap >= this_fday(1) & fday_ap <= this_fday(end);
    
    this_ap = nanmean(ap(L1));
    
    if this_ap < 20,
        
        a = this_fday - floor(this_fday);
        
        % match each sample's time-of-day to a grid slot (1e-3 day tolerance)
        index_i = 1;
        clear index_a;
        for ii = 1: length(a),
            L = abs(fday_decimals - a(ii)) < 1e-3;
            
            if sum(L) == 1,
                index_a(index_i) = find(L==1);
                index_i = index_i + 1;
            end;
        end;
        
        
        isr_quiet_day_matrix(N_data,index_a) =  this_drift;
        N_data = N_data + 1;
        
        
    end;
end;

% Average the quiet-day matrix over days, then fit a least-squares spline
% on 30-minute knots and evaluate it back on the 5-minute grid; plot the
% raw average (blue) against the smoothed curve (red).
quiet_day_var = nanmean(isr_quiet_day_matrix,1);

knots = min(fday_decimals) :30/(24*60): max(fday_decimals); % knots every 30 min
sp = spline(knots, quiet_day_var/spline(knots,eye(length(knots)),fday_decimals));
v = ppval(sp, fday_decimals);

plot([0:5/(60*24):0.9999], nanmean(isr_quiet_day_matrix,1));
hold on
plot(fday_decimals, v, 'r');

%save E:\projects\ace_tensor\jicamarca_quiet_average quiet_day_var
%fday_decimals sp v=

%% Produce filter coefficients from Ty estimates
if ispc == 1,
    load E:\projects\ace_tensor\Electric_Field_Matrix seg_length EEF_SEG IEF_EY_SEG IEF_EZ_SEG TIME_SEG;
end;

if isunix == 1,
    
    % Functional load replaces the original eval('load ...') construct,
    % which hid the file name and variable list inside a string.
    load('/data/backup/mnair/ace_tensor/Electric_Field_Matrix', ...
        'seg_length','EEF_SEG','IEF_EY_SEG','IEF_EZ_SEG','TIME_SEG');
end;


% scale drift segments to mV/m (same 24.366e-3 factor used earlier)
EEF_SEG = EEF_SEG.*24.366*1e-3; %mV/m

% Transfer-function estimates; note no sampling frequency is given here
% (default fs), unlike the explicit 1/(5*60) used earlier in this script.
[Tx,F] = tfestimate(IEF_EY_SEG,EEF_SEG,hanning(seg_length),0,seg_length);
[Ty,F] = tfestimate(IEF_EZ_SEG,EEF_SEG,hanning(seg_length),0,seg_length);


%%
% Fit a rational IIR filter (order 5/5) to the transfer-function estimates,
% then plot the filter's response to box, step, delta and triangular inputs.
% NOTE(review): the [b,a] from the Tx fit on the next line is immediately
% overwritten by the Ty fit below, so only the Ty coefficients are used.
% NOTE(review): invfreqz expects frequencies in rad/sample; F here comes
% from tfestimate -- confirm the units/fs argument (3000) are consistent.
[b,a]=invfreqz(Tx,F,5,5,[],3000);

%[b,a]=invfreqz(Ty,F,3,2,[],30);
%Ty(1:6) = 0.0;

[b,a]=invfreqz(Ty,F,5,5,[],3000);

%a=a_n;b=b_n;

% 72 five-minute samples = 6 hours; figure(1) shows inputs, figure(2) responses
len = 72;
step = zeros([1,len]);
step(13:36) = 1; % positive  box
figure(1);
plot((1:5:len*5)./60,step,'b-','LineWidth',4);
grid on;
hold on;
figure(2);
plot((1:5:len*5)./60,filter(b,a,step).*1.0,'b.-','LineWidth',4);%1.5 was for tf made only from Julia
hold on;
grid on;

len = 72;
step = zeros([1,len]);
step(13:end) = 1; % positive step
figure(1);
plot((1:5:len*5)./60,step,'r-','LineWidth',2);
figure(2);
plot((1:5:len*5)./60,filter(b,a,step).*1.0,'r-','LineWidth',2)

step = zeros([1,72]);
step(25) = 1; % dac delta
figure(1);
plot((1:5:len*5)./60,step,'k-','LineWidth',3);
hold on;
figure(2);
plot((1:5:len*5)./60,filter(b,a,step).*1.0,'k-','LineWidth',3)
grid on;

step = zeros([1,72]);
step(30:54) = triang(25)'; % triangular pulse
figure(1);
plot((1:5:len*5)./60,step,'c-','LineWidth',2);
hold on;
figure(2);
plot((1:5:len*5)./60,filter(b,a,step).*1.0,'c-','LineWidth',2)
grid on;


figure(1);
set(gca,'FontSize',16);
axis([0,6,-1,2]);
xlabel('Time (hours)')
ylabel('IEF Ey mV/m');


figure(2);
set(gca,'FontSize',16);
%axis([0,6,-0.1,0.1]);
axis([0,6,-0.01,0.01]);
%set(gca,'Ytick',[-0.05, -0.025,0,0.025,0.05]);
xlabel('Time (hours)');
ylabel('Equatorial Zonal EF mV/m');




%% Read the TIR - PND data, calculate the delh and convert that to EEF

% one-minute time stamps, centred on each minute (hh:mm:30)
time_array_min = (datenum(1995,1,1,0,0,30): (1/(24*60)): datenum(2010,12,31,23,59,30))';

% BUG FIX: the original file-name string was corrupted ("'[ '/data/...") and
% did not parse; the stray "[ '" prefix is removed. Path kept otherwise
% as written -- confirm the 'projects' component is correct on this host.
nc_fname = '/data/backup/mnair/projects/iig_mag_data/TIR_1995_2010.nc';


S1 = dir(nc_fname);
if ~isempty(S1),
    ncid = netcdf.open(nc_fname,'NOWRITE');
    X_ID = netcdf.inqVarID(ncid,'Magnetic_Field_X');
    Y_ID = netcdf.inqVarID(ncid,'Magnetic_Field_Y');
    Z_ID = netcdf.inqVarID(ncid,'Magnetic_Field_Z');
else
    % Fail fast with a clear message; the original fell through and later
    % crashed with "ncid undefined".
    error('NetCDF file not found: %s', nc_fname);
end;

% read, convert stored x10 values, and map the 99999.9 missing-value
% sentinel to NaN
x_data = netcdf.getVar(ncid, X_ID, 0, length(time_array_min));
x_data = double(x_data)/10;
x_data(x_data==99999.9) = NaN;

y_data = netcdf.getVar(ncid, Y_ID, 0, length(time_array_min));
y_data = double(y_data)/10;
y_data(y_data==99999.9) = NaN;

netcdf.close(ncid);


% horizontal field magnitude at Tirunelveli (TIR)
h_tir = sqrt(x_data.^2 + y_data.^2) ;


% BUG FIX: corrupted file-name string as in the TIR read above; stray
% "[ '" prefix removed -- confirm the 'projects' path component.
nc_fname = '/data/backup/mnair/projects/iig_mag_data/PND_1995_2010.nc';


S1 = dir(nc_fname);
if ~isempty(S1),
    ncid = netcdf.open(nc_fname,'NOWRITE');
    X_ID = netcdf.inqVarID(ncid,'Magnetic_Field_X');
    Y_ID = netcdf.inqVarID(ncid,'Magnetic_Field_Y');
    Z_ID = netcdf.inqVarID(ncid,'Magnetic_Field_Z');
else
    % fail fast rather than hitting "ncid undefined" below
    error('NetCDF file not found: %s', nc_fname);
end;

% read, convert stored x10 values, and map the 99999.9 sentinel to NaN
x_data = netcdf.getVar(ncid, X_ID, 0, length(time_array_min));
x_data = double(x_data)/10;
x_data(x_data==99999.9) = NaN;

y_data = netcdf.getVar(ncid, Y_ID, 0, length(time_array_min));
y_data = double(y_data)/10;
y_data(y_data==99999.9) = NaN;

netcdf.close(ncid);

% horizontal field magnitude at Pondicherry (PND) -- confirm station name
h_pnd = sqrt(x_data.^2 + y_data.^2) ;


% delete the data before 2001 and after 2003

time_start = datenum(2001,1,1,0,0,30);
time_end = datenum(2003,12,31,23,59,30);

L = time_array_min >= time_start & time_array_min <= time_end;

h_pnd(~L) = [];
h_tir(~L) = [];

% reshaping the vector to matrix of 1440 minutes x days. Using [] lets
% MATLAB derive the day count (the original hard-coded 1576800/1440 = 1095,
% which only worked for exactly the 2001-2003 window).
h_tir_mat = reshape(h_tir,1440,[]);
h_pnd_mat = reshape(h_pnd,1440,[]);




% remove night-time average (minutes 1000-1100 of each day, ~16:40-18:20 UT
% -- presumably local night at these stations; confirm) from each day

tir_night_av = nanmean(h_tir_mat(1000:1100,:));

h_tir_mean_removed_mat = h_tir_mat - repmat(tir_night_av,[1440,1]);

pnd_night_av = nanmean(h_pnd_mat(1000:1100,:));

h_pnd_mean_removed_mat = h_pnd_mat - repmat(pnd_night_av,[1440,1]);


% Things to do
% 1 . Convert the delta H to EEF 2. Remove the quiet day average

% OK, remove the quiet day average

% delta H between the two stations; b1 holds spline knot positions (minutes)
ind_delh = h_tir_mean_removed_mat - h_pnd_mean_removed_mat;
b1 = [1         180         360         540         720         900        1080        1260        1440];

% all-days average daily curve, used as fallback for gappy days below
global_average = nanmean(ind_delh,2)';

%%
% Remove the daily climatology from each day's delta-H curve:
% for each day, fit a smooth least-squares spline on the knots b1 and
% subtract it. Days with too many gaps fall back to the global average.
% FIXES: loop bound generalized from the hard-coded 1095 to
% size(ind_delh,2); ind_climate_rem preallocated instead of grown inside
% the loop; scalar logical && instead of element-wise &; modern
% ppval(pp,xx) argument order (consistent with the ppval calls later in
% this file) replacing the legacy ppval(xx,pp) form -- both return the
% same values.
n_days = size(ind_delh,2);
ind_climate_rem = zeros(n_days,1440);

x = 1:1440;  % minute-of-day axis (loop invariant, hoisted)

for i = 1 : n_days,

    y = ind_delh(:,i)';

    L = isnan(y);

    % Fill gaps by linear interpolation only when less than 20% of the
    % day is missing (and there is at least one gap). NaNs at the very
    % start/end of the day are not extrapolated and remain NaN.
    if sum(L)/length(L) < 0.2 && sum(L) ~= 0,
        y = interp1(x(~L),y(~L),x);
    end;

    if sum(isnan(y)) == 0,

        % Least-squares spline fit: y/spline(b1,eye(...),x) solves for the
        % knot values whose spline best fits y in the least-squares sense
        sp = spline(b1, y/spline(b1,eye(length(b1)),x));
        v = ppval(sp,x);

    else
        % Too many gaps to fit this day -- subtract the global average
        v = global_average;

    end;

    %plot

    %     subplot(211);
    %     plot(x,y,x,v);
    %     subplot(212);
    %     plot(x,y-v,x,y- global_average);
    %     title('Global Average');
    %     pause;

    ind_climate_rem(i,:) = y - v;
end;



%% Compare the observed EEF at Jicamarca ISR and India delta H with that of predicted fields..
%

% Load observed drifts, predicted EEF, Ap indices and ACE IEF data sets.
load ([ '/data/backup/mnair/ace_tensor/ind_delh_array.mat']);
load /data/backup/mnair/ace_tensor/predicted_eef.mat  eef_fday eef_response % Tensor AP ALL OLD
%load /data/backup/mnair/longp/predicted_eef eef_fday eef_response; % AP GT20 NEW
load ([ '/data/backup/mnair/geomag/indices/aplist.mat']);
load ([ '/data/backup/mnair/ace_tensor/acedata/ace_2000_2010.mat']);
load ([ '/data/backup/mnair/ace_tensor/Drift_Data_Combined_2000_2011.mat']);
load ([ '/data/backup/mnair/ace/Julia_W_new1']);
load /data/backup/mnair/ace/jcamarca_isr_fejer.mat eef;
% jicamarca eef data contains
% eef(:,1) = fday - datenum(2000,1,1);
% eef(:,4) = fday_lt;
% eef(:,6) = drift*23000/1e6;
% eef(:,15) = fejer(:,2)*23000/1e6;
load  /data/backup/mnair/longp/jicamarca_isr_2000_2005_sigma
% the above Jicamarca ISR file contains the drift average for
% sigma < 1, for different height ranges

% To use the existing variable for ACE fday:
% convert fday (referenced to 1-1-2000) to MATLAB datenum (referenced to Jan-1-0000)
%fday = ntimestamps;
% When using the data sent by Anderson
fday = [ eef(:,1) + datenum(2000,1,1)]';

% Add the propagation delay for predicted EEF signal (this is because
% the transfer function doesn't apply this delay)

% Note
%eef_fday = eef_fday + 17/(60*24) ; % This shift is required if using the
%new eef prediction for AP > 20 
eef_fday = eef_fday ; % no shift is required for older prediction (already corrected for)


% Add the propagation delay (17 minutes) for IEF from BSN to ionosphere
ace_all(:,1) = ace_all(:,1) + 17/(60*24)  ;

% To use only JULIA data
% Put the data into Drift data

%  Drift_Comb_fday_vec = floor(Julia_fday(:,100)) + datenum(2000,1,1);
%  Drift_Comb_W = w_climate;
%  Drift_Comb_fday = Julia_fday + datenum(2000,1,1);

% Use Jicamarca ISR dates
A = unique(floor(fday)); % get the unique days for which ISR data are available

% Use both Jicamarca ISR and JULIA / delta H

%A = unique([floor(fday) Drift_Comb_fday_vec' ]);


% Use JULIA dates
%A = floor(Julia_fday(:,100)) + datenum(2000,1,1); % get the unique days
%for which JULIA data are available

%Limit the dates to when India data are available
%  L = A >= datenum(2001,1,1) & A <= datenum(2004,1,1);
%  A(~L) = [];

%use specified dates interval
%A = datenum(2001,1,1) : datenum(2004,1,1); % All dates
%
% fday_decimals = roundn((0: (5/(24*60)) : 1 - (5/(24*60))),-4);
%
% selected_index = [21, 30, 55, 61, 65];
%selected_index = [21, 30, 61, 65];


%%

% Flags and options for the day-by-day plotting loop below
jic_flag = 0;            % set to 1 when Jicamarca ISR data exist for the day
data_flag = 0;           % counts how many observed data sets exist for the day
n_phase_delay_corr = 1;  % index into phase_delay_correction
options.ap_lower_limit = 20; % FIXED at 20 lower limit of Ap

% selected dates
% 2001-12-12,

phase_delay_correction = [0 0 0 0 0 0]*1; % Cosmetic adjustment to compensate slight erroneous 
% time delay corrections by OMNI

% [251,279,505,1046,1047,1048];% ISR & Combined JULIA & Delta H data sets
% The above dates are used in the PP LP paper (
%for i = [251,279,505,1046,1047,1048];% ISR & Combined JULIA & Delta H data sets
% Main scrolling-plot loop. For each candidate day A(i): gather the
% coincident Jicamarca ISR drifts, the combined Peru delta-H/JULIA drifts,
% the ACE-derived IEF, the predicted EEF, and the Indian delta-H; then, if
% the day is disturbed (mean Ap > options.ap_lower_limit) and enough
% observed data exist, draw a 3-panel figure (IEF / South America EEF /
% India EEF) and wait for a keypress.
for i = 38:length(A)
    jic_flag = 0;
  
    data_flag = 0;
    %find the coincident jicamarca data
    L = floor(fday) == A(i);
        jicamarca_quiet_day     = eef(L,15);
        jicamarca_eef_piece_old = eef(L,6);
        jicamarca_eef_fday_old      = fday(L)';
        
    LL = floor(ntimestamps) == A(i);
    
    if sum(L) > 10,
        % get the jicamarca isr data
%         jicamarca_eef_piece = eef(L,6);
%         jicamarca_eef_fday = fday(L);
%         jicamarca_eef_piece = jicamarca_drift_500(LL) * 23000 / 1e6;
%         jicamarca_eef_fday      = ntimestamps(LL);
        
        % Drift (m/s) to EEF (mV/m) conversion: * 23000 / 1e6
        % NOTE(review): 23000 presumably is B in nT at Jicamarca -- confirm
        jicamarca_eef_piece = [jicamarca_drift_500(LL) * 23000 / 1e6];
        jicamarca_eef_fday      = ntimestamps(LL);
        % drop missing samples; note L is reused as a NaN mask here
        L = isnan(jicamarca_eef_piece);
        jicamarca_eef_piece(L) = [];
        jicamarca_eef_fday(L) = [];

        %         jicamarca_eef_piece     = eef(L,6);
        %         jicamarca_eef_fday      = fday(L);
        %         jicamarca_quiet_day     = eef(L,15);
        %         jicamarca_150_eef = [jicamarca_drift_150(LL) * 23000 / 1e6]';
        %         jicamarca_600_eef = [jicamarca_drift_600(LL) * 23000 / 1e6]';
        %         jicamarca_gt_500_eef = [jicamarca_drift_gt_500(LL) * 23000 / 1e6]';
        %         jicamarca_two_level_fday = jicamarca_fday(LL)';
        jic_flag = 1;
        
        
    else
        jic_flag = 0;
        
    end;
    
    % the combined data set of Peru EEF (delta H and JULIA)
    k = find(Drift_Comb_fday_vec == A(i));
    
    % keep only the first match when the day appears more than once
    if length(k) > 1,
        k = k(1);
    end;
    
    % to remove delta H at Peru from further analysis
    
   
    
    if ~isempty(k),
        
        % Convert vertical drift W to EEF (mV/m) and remove the daily mean
        jic_delh_orjulia_eef = Drift_Comb_W(k,:)*24.366*1e-3 - nanmean(Drift_Comb_W(k,:)*24.366*1e-3);
        jic_delh_orjulia_fday = Drift_Comb_fday(k,:);
        LLL = isnan(jic_delh_orjulia_eef) | isnan(jic_delh_orjulia_fday);
        jic_delh_orjulia_eef(LLL) = [];
        jic_delh_orjulia_fday(LLL) = [];
        data_flag = 1;
    else
        
        jic_delh_orjulia_eef = [];
        jic_delh_orjulia_fday = [];
        
    end;
    
    L = ace_all(:,1) >= A(i) & ace_all(:,1) < A(i) + 1;
    
    %Get the ACE derived IEF data
    ief_ey = ace_all(L,2);
    ief_ez = ace_all(L,3);
    ief_fday = ace_all(L,1);
    
    L = eef_fday >= A(i) & eef_fday < A(i) + 1;
    
    %get the predicted PP EF data
    predicted_eef_ut = eef_response(L);
    predicted_eef_ut_fday = eef_fday(L);
    
    % Get the delta-H data from Indian sector. Limit to local day-time
    % (08-18 LT; India is UT+5.5, hence the -5.5 h offsets)
    L = india_fday >= A(i) + (8 - 5.5) / 24 & india_fday <= A(i) + (18 - 5.5)/ 24;
    
    if sum(L) > 10,
        india_delh = india_climate_rem_delh_array(L);
        ind_fday = india_fday(L);
        data_flag  = data_flag + 1;
    else
        
        india_delh = [];
        ind_fday = [];
    end;
    
    %Find the mean Ap index during this day
    L = fday_ap >= A(i) & fday_ap < A(i) + 1;
    mean_ap = nanmean(ap(L));
    
    
    %Apply LT correction
    
    % Jicamarca (UT-5)
    jicamarca_eef_lt = pp_lt_response(predicted_eef_ut_fday, -5);
    % India (UT+5.5)
    
    india_eef_lt = pp_lt_response(predicted_eef_ut_fday, 5.5);
    
    
    
    %plot
    
    % Only plot disturbed days with at least two observed data sets
    % (or ISR plus one other)
    if mean_ap > options.ap_lower_limit && ( data_flag > 1 || ( jic_flag == 1 && data_flag > 0) ),
        
        subplot(311);
        
        plot((ief_fday-floor(ief_fday))*24,ief_ey,(ief_fday-floor(ief_fday))*24,ief_ez, 'LineWidth', 2);
        
        %plotting only IMF Ey (overwrites the Ey+Ez plot above)
        plot((ief_fday-floor(ief_fday))*24,ief_ey, 'LineWidth', 2);
        
        axis([0 24 -inf inf]);
        
        xticks = 0:2:24 ;
        
        set(gca,'XTick', xticks);
        
        
        %     xticks_lt = xticks - 5;
        %
        %     xticks_lt (xticks_lt <= 0) = xticks_lt (xticks_lt <= 0) + 24;
        %
        %     set(gca,'XTickLabel',  reshape(sprintf('%2.0f',xticks_lt),[2,length(xticks_lt)])' );
        
        set(gca,'FontSize',16);
        xlabel('UT (Hours)');
        ylabel('IEF mV/m');
        legend('IEF Ey');
        %text(0,0,'ACE IEF Ey', 'FontSize',20)
        
        title(['Date = ' datestr(A(i)) ', ' sprintf('Mean Ap = %7.0f', mean_ap)]);
        
        
        subplot(312);
        
        if jic_flag == 1 && length(jicamarca_eef_fday) > 10,
            
            % The latest processed jicamarca isr data needs climatology correction
            % (least-squares spline on 3-hour knots, subtracted below)
            b1 = jicamarca_eef_fday(1):180/1440:jicamarca_eef_fday(end);
            
            if length(b1) > 2
                sp = spline(b1, jicamarca_eef_piece/spline(b1,eye(length(b1)), jicamarca_eef_fday));
                y = ppval(sp, jicamarca_eef_fday);
            else
                y = zeros([1,length(jicamarca_eef_piece)])';
            end;
            
            jic_data = runmean(jicamarca_eef_piece - y', 2) ;
            
%             plot((jicamarca_eef_fday - floor(jicamarca_eef_fday) ) * 24, ...
%                 jicamarca_eef_piece - y' , 'LineWidth', 2);
                plot((jicamarca_eef_fday - floor(jicamarca_eef_fday) ) * 24, jic_data,'LineWidth', 2);
            hold on;
            
                % runmean on window 2*M + 1 
               jic_data = runmean(jicamarca_eef_piece_old - jicamarca_quiet_day, 2) ;
                
                  plot((jicamarca_eef_fday_old - floor(jicamarca_eef_fday_old ) ) * 24, ...
                      jic_data , 'k');
                  hold on;
            
        end;
        
        if length (jic_delh_orjulia_fday ) > 10,
            
            % Same climatology correction for the combined delta H / JULIA set
            b1 = jic_delh_orjulia_fday(1):180/1440:jic_delh_orjulia_fday(end);
            
            if length(b1) > 2
                sp = spline(b1, jic_delh_orjulia_eef/spline(b1,eye(length(b1)), jic_delh_orjulia_fday));
                y = ppval(sp, jic_delh_orjulia_fday);
            else
                y = zeros([1,length(jic_delh_orjulia_fday)]);
            end;
            
            
             if k > 1480, %Peru Del H
        
            plot((jic_delh_orjulia_fday - floor(jic_delh_orjulia_fday) ) * 24, ...
                jic_delh_orjulia_eef - y, 'b--','LineWidth', 2);
            hold on;
             else % JULIA
                 plot((jic_delh_orjulia_fday - floor(jic_delh_orjulia_fday) ) * 24, ...
                jic_delh_orjulia_eef - y, 'g','LineWidth', 2);
            hold on;
             end;
                 
        end;
        
        
        % Predicted EEF, modulated by the Jicamarca local-time response
        plot((predicted_eef_ut_fday - floor(predicted_eef_ut_fday))*24, ...
            jicamarca_eef_lt .* predicted_eef_ut,'r', 'LineWidth', 2);
        
              
        axis([0 24 -inf inf]);
        
        xticks = 0:2:24 ;
        
        set(gca,'XTick', xticks);
        
        
        % relabel the UT axis in Jicamarca local time (UT-5)
        xticks_lt = xticks - 5;
        
        xticks_lt (xticks_lt <= 0) = xticks_lt (xticks_lt <= 0) + 24;
        
        set(gca,'XTickLabel',  reshape(sprintf('%2.0f',xticks_lt),[2,length(xticks_lt)])' );
        
        set(gca,'FontSize',16);
        xlabel('Local Time (Hours)');
        ylabel('EEF mV/m');
        legend('Observed', 'Predicted');
        
        text(0,0,'South America', 'FontSize',20)
        hold off;
        
        subplot(313);
        
        %Calculate the EEF in Indian sector following the equation
        % Drift  = 5.2889 + 0.1947DH + 0.0001DH^2- 0.0000021DH^3 by
        %Anderson et al 2004 Space Weather and using B = 37000 nT (2004,
        %trivandrum, IGRF at 150 km)
        
        eef_india = (5.2889 + 0.1947*india_delh + 0.0001*(india_delh).^2 -...
            0.0000021*(india_delh).^3) * 37000 / 1e6;
        
%         plot((ind_fday - floor(ind_fday))*24 + phase_delay_correction(n_phase_delay_corr)/1440 ...
%         , eef_india - mean(eef_india), 'LineWidth', 2);
     plot((ind_fday - floor(ind_fday))*24 ...
        , eef_india - mean(eef_india), 'LineWidth', 2);
        hold on;
        plot((predicted_eef_ut_fday - floor(predicted_eef_ut_fday))*24, india_eef_lt.*predicted_eef_ut,'r', 'LineWidth', 2);
        axis([0 24 -inf inf]);
        
        xticks = 0:2:24 ;
        
        set(gca,'XTick', xticks);
        % This is to increment the phase delay array index. This should be
        % used for plotting a few data sets. For large data scrolling,
        % remove this and its other references.
        n_phase_delay_corr = n_phase_delay_corr + 1;
        
        % relabel the UT axis in Indian local time (UT+5.5)
        xticks_lt = xticks + 5.5;
        
        xticks_lt (xticks_lt >= 24) = xticks_lt (xticks_lt >= 24) - 24;
        
        set(gca,'XTickLabel',  reshape(sprintf('%4.1f',xticks_lt),[4,length(xticks_lt)])' );
        
        set(gca,'FontSize',16);
        xlabel('Local Time (Hours)');
        ylabel('EEF mV/m');
        legend('Observed','Predicted');
        text(0,0,'India', 'FontSize',20)
        hold off;
        
        % wait for a keypress before moving to the next day
        pause;
        
    end;
    clf;
    
    
end;

% Close all figure windows once the scrolling loop is done
close all
%test July 13, 2011
%test July 13, 2011
%test July 17 2011 Home MacPro
