% data setup: our data contains two classes, each N samples. The data is 2D
clc;
clearvars;  % 'clear all' also wipes breakpoints and compiled functions; clearvars is the recommended form
close all;


shouldwePlot = 1;      % enable/disable plotting from now on
generatehand_data = 1; % if true, the randomized sampling of test points below is
                       % ignored and 6 hand-built points are appended to testdata

% Generating Data: 2 gaussian clusters (2*N points with N positives and N negatives),
% split into 2 parts: training will contain 2*Ntrain examples.
N = 500;      % samples per class
Ntrain = 300; % training samples per class
% Labels (length 2N): a +1 block then a -1 block for train, same again for test.
l = [ones(Ntrain,1); -ones(Ntrain,1); ones(N-Ntrain,1); -ones(N-Ntrain,1)];
% 2-D features: each class centred at l/2 in both coordinates, gaussian noise / 1.5.
d = [l/2 + randn(2*N,1)/1.5, l/2 - randn(2*N,1)/1.5];
trainingdata = [d(1:2*Ntrain,:), l(1:2*Ntrain)];        % 2*Ntrain rows of [x1 x2 label]
testdata     = [d(2*Ntrain+1:end,:), l(2*Ntrain+1:end)]; % remaining 2*(N-Ntrain) rows

% doing logistic regression : Prof. Rudin's plugin.
% glmfit expects [successes trials]; labels {-1,+1} are mapped to {0,1} via 0.5*y+0.5.
B = glmfit(trainingdata(:,1:2), [0.5*trainingdata(:,3)+0.5 ones(length(trainingdata(:,3)),1)], 'binomial', 'link', 'logit');


% find probabilities on training. not necessary for further calculations.
Ftrain = B(1) + trainingdata(:,1:2)*B(2:end);  % linear score f(x_train); not used later
Proba1 = 1 ./ (1 + exp(-Ftrain));              % logistic P(y=+1 | x)
Proba0 = 1 - Proba1;                           % P(y=-1 | x)
% Points confidently predicted 0 / 1 (prob > thresh), and the ambiguous band
% around 0.5 (the "error zone").
thresh = 0.65;
Index0training = find(Proba0 > thresh);
Index1training = find(Proba1 > thresh);
IndexErrorZonetraining = find(abs(max(Proba1, Proba0) - 0.5) < thresh - 0.5);

% Plotting training data only, coloured by the classifier's confidence bands.
if shouldwePlot == 1
    figure; hold on;
    scatter(trainingdata(Index0training,1), trainingdata(Index0training,2), 'r.');
    scatter(trainingdata(Index1training,1), trainingdata(Index1training,2), 'b.');
    scatter(trainingdata(IndexErrorZonetraining,1), trainingdata(IndexErrorZonetraining,2), 'g');
    hold off;
    axis equal;
end



% Generating test points

% find probabilities on testdata. required for one of the if cases below.
Ftestfull = B(1) + testdata(:,1:2)*B(2:end);
Proba1fulltest = 1 ./ (1 + exp(-Ftestfull));   % logistic P(y=+1 | x) on the test set
Proba0fulltest = 1 - Proba1fulltest;

% Selecting the first 5 points randomly from the two clusters.
delta = 0.0;     % in [0, 1-thresh]; pushes the sampled test points away from the boundary
extremalx1 = 0;  % the left limit for choosing the 6th node on the first dimension
pos = find(Proba1fulltest > thresh + delta); a = 1; b = length(pos); pos_tl1 = pos;
% randi gives an unbiased uniform draw over 1..b; the previous round(unifrnd(a,b,..))
% under-weighted the endpoints 1 and b and required the Statistics Toolbox.
pos_l1 = pos(randi(b, 3, 1));
pos = find(Proba0fulltest > thresh + delta); a = 1; b = length(pos); pos_tl0 = pos;
pos_l0 = pos(randi(b, 2, 1));

% We want the 6th point to have p ~ 0.5 (i.e. near the decision boundary) and to be
% as far right as possible. Actually finding that 'extremal' node below:
Indexset_for_6th_node = find(abs(max(Proba1fulltest, Proba0fulltest) - 0.5) < thresh - 0.5);
Indexset_extremal = find(testdata(Indexset_for_6th_node,1) > extremalx1);
[tempval, pos_6thnode] = max(testdata(Indexset_for_6th_node(Indexset_extremal),1));

pos_t6 = Indexset_for_6th_node(Indexset_extremal(pos_6thnode));

if generatehand_data == 1
    % Hand-built test points are appended after the existing test rows.
    % NOTE: size(..,1) is the row count; length() returns the LARGEST dimension
    % and only happened to equal the row count here (400 rows > 3 cols).
    indexforappend = size(testdata, 1);
    pos_l1 = [indexforappend+1:1:indexforappend+3]';  % 3 positive hand points
    pos_l0 = [indexforappend+4:1:indexforappend+5]';  % 2 negative hand points
    pos_t6 = [indexforappend+6];                      % the boundary ('extremal') point
    % first 5 test points: tight clusters around the class centres
    lt = [1; 1; 1; -1; -1];
    dt = [lt/2 + randn(5,1)/5  lt/2 - randn(5,1)/5];
    testdata(indexforappend+1:indexforappend+5,:) = [dt lt];
    % the 6th point: placed on the fitted decision line B(1)+B(2)*x+B(3)*y = 0,
    % evaluated at x = xt(end) = 1
    xt = [-1:0.1:1];                    epsilonval = 1e-6;  % guards against division by B(3) == 0
    m = -(B(2)/(B(3) + epsilonval));    c = -B(1)/(B(3) + epsilonval);
    yt = m*xt + c;
    testdata(indexforappend+6,:) = [xt(end) yt(end) -1];
end

% The indices of the 6 carefully chosen nodes, in order: 3 positive, 2 negative, 1 boundary.
pos_test = [pos_l1; pos_l0; pos_t6];

% plotting the test nodes
if shouldwePlot == 1
    figure; hold on;
    scatter(testdata(pos_tl0,1), testdata(pos_tl0,2), 'm.');
    scatter(testdata(pos_tl1,1), testdata(pos_tl1,2), 'y');
    scatter(testdata(pos_l1,1),  testdata(pos_l1,2),  'r');
    scatter(testdata(pos_l0,1),  testdata(pos_l0,2),  'b');
    scatter(testdata(pos_t6,1),  testdata(pos_t6,2),  'c^');
    axis equal;
    hold off;
end

% checking probabilities of test nodes
% Assigning probabilities to the 6 test nodes based on the fitted theta.
% Vectorized to match Ftrain/Ftestfull above (the old form hardcoded the two
% feature columns explicitly).
F6node = B(1) + testdata(pos_test,1:2)*B(2:end);

q = 1 ./ (1 + exp(-F6node));  % Model 1: logistic probability (overwritten just below)

% Model 2 overwrites q with the softplus log(1 + exp(f)).
% Semicolon added: the original statement echoed the whole vector to the console.
q = log(1 + exp(F6node));

training_loss_grid;      % sibling scripts sharing this workspace
route_cost_grid;
adding_the_two_costs;
