% Cutting-plane training algorithm (n-slack formulation):
%   S ← ∅
%   repeat
%     (w, ξ) ← solution of the QP using only the constraints in S
%     for i = 1, ..., n do
%       ŷ_i ← argmax_{y ∈ Y}  ∆(y_i, y) + ⟨w, φ(x_i, y)⟩
%     end for
%     S ← S ∪ { ((x_1, ..., x_n), (ŷ_1, ..., ŷ_n)) }
%   until S doesn't change anymore.

% Output: w, to be used in the prediction function
%     f(x) = argmax_{y ∈ Y}  ⟨w, φ(x, y)⟩
clear;
dual_qp = true; % true: custom dual QP solver; false: CVX primal formulation

% CVX only needs to be set up when solving the primal with it.
if ~dual_qp
    cd cvx
    cvx_setup
    cd ..
end

% load training data (SVM-light format)
[y, X] = svmlread('example4/train.dat');
X = full(X);

% problem constants
C = 5000;          % regularization trade-off
m = size(X,1);     % number of training samples
X(:,54) = ones(m,1); % constant feature column for bias wb
                     % NOTE(review): assumes the data has at most 53 real
                     % features so column 54 is free — verify against data.
n = size(X,2);     % feature dimension (including bias column)
K = 7;             % number of classes

% Working set: per-sample record of the constraints found so far.
for i = 1:m
    S(i).x = X(i,:)';       % feature vector of sample i
    S(i).y = y(i);          % true label of sample i
    S(i).yhat = [];         % most-violated labels added so far
    S(i).loss_yhat = [];    % loss ∆(y_i, ŷ) for each stored ŷ
    S(i).x_feat = {};       % φ(x_i,y_i) - φ(x_i,ŷ) for each stored ŷ
end

% optimization state
iter = 0;
wsol = zeros(n*K,1); % current weight vector (one n-block per class)
alpha = [];          % dual variables (filled by the dual QP solver)

epsilon = 0.1;       % constraint-violation tolerance
maxiter = 30;        % hard cap on outer iterations

prev_S = [];         % previous working set, for the convergence check
G = [];              % equality-constraint matrix for the dual QP
% Outer cutting-plane loop: repeat until the working set S stops growing
% (or maxiter is reached). Each pass scans all samples, adds the most
% violated constraint per sample when it exceeds the current slack by
% epsilon, and re-solves the QP immediately after each addition.
while ~isequal(prev_S,S);

    iter = iter+1;  
    if iter > maxiter 
        break;
    end
   prev_S = S;
   
   
      for i=1:m
        
         % Loss-augmented inference: most violated label for sample i
         % under the current wsol: argmax_y ∆(y_i,y) + <w, φ(x_i,y)>.
         [yhat(i,:) infval] = SSVM_LossAumentedInference(wsol, S(i).x,S(i).y,K);
         % Sanity check: recompute the augmented score manually and
         % compare against the value returned by the inference routine.
         chckinfval= wsol'*SSVM_feature(S(i).x,yhat(i,:),K)+SSVM_Loss(S(i).y,yhat(i,:));
         if(abs(chckinfval - infval) > 0.000001)
             fprintf('checking LossAugmentedInf error, LossAugmentedVal = %d, manual cal=%d\n',infval,chckinfval);
             pause;
         end
        
        
         % Multiclass labels are scalars; anything wider is a bug.
         if(size(yhat(i,:),2) ~=1)
             disp('size yhat(i:) error');
             pause;
         end
          % Constraint row: δφ_i = φ(x_i,y_i) - φ(x_i,ŷ_i).
          x_feature(i,:) = SSVM_feature(S(i).x,S(i).y,K) - SSVM_feature(S(i).x,yhat(i,:),K);
         % Violation of the new candidate: ∆(y_i,ŷ_i) - <w, δφ_i>.
         cur_gap_i = SSVM_Loss(S(i).y,yhat(i,:)) - wsol'*x_feature(i,:)';	

        % Current slack ξ_i = max over already-stored constraints of
        % ∆(y_i,ŷ) - <w, δφ> (clamped at 0).
        kk = size(S(i).yhat,2);
         max_H = -Inf;         
        for ki = 1:kk
            cur_H =  SSVM_Loss(S(i).y,S(i).yhat(ki)) - wsol'*S(i).x_feat{ki}';
           if cur_H > max_H
                max_H = cur_H;
           end
        end
        sai_i(i) = max(0,max_H);


         % Add the constraint only if it is violated by more than
         % epsilon beyond the current slack, then re-solve the QP.
         if cur_gap_i >= sai_i(i)+epsilon
             disp('in');
            S(i).yhat(end+1) = yhat(i,:);
            S(i).loss_yhat(end+1) = SSVM_Loss(S(i).y,yhat(i,:));
            S(i).x_feat{end+1} = x_feature(i,:);
        
    
       
         %1.2 %call QP solver
       
   if dual_qp
        % primial and dual no_constraints are not equal.
         % Stack all stored constraint rows / losses across samples.
         Psai = cell2mat([S.x_feat]');        
         lossvec = [S.loss_yhat];
         no_constraints = size(Psai,1);
         G = calConstraints(S);
         [alpha fval] = mySVMquadprocOpt(Psai,lossvec,C,G,no_constraints,m);
         size(alpha)
         %fval = -fval;        
         % Recover the primal weights from the dual solution.
         wsol = Psai'*alpha;
         w = wsol(1:2);
         b = wsol(3);
        fprintf('---Iter %d, sample no %d, dual no.constraints %d------\n',iter,i,no_constraints);
   else
         %[Psai lossvec no_constraints] = calConstraintsPrimal(S);        
        Psai = cell2mat([S.x_feat]');        
        lossvec = [S.loss_yhat]';
        no_constraints = size(Psai,1);

% Primal n-slack QP via CVX:
%   min 1/2 ||w||^2 + C/m * sum_i ξ_i
%   s.t. <w, δφ> >= ∆ - ξ_i for every stored constraint, ξ >= 0.
% xi_iy maps each constraint row to its sample's slack variable.
cvx_begin
variables w(n*K) xi(m)
minimize 1/2*sum(w.*w) + C/m*sum(xi)
Psai*w >= lossvec -  xi_iy(xi,S);
xi >= 0;
cvx_end


   fprintf('---Iter %d, sample no %d, dual primal no.constraints %d------\n',iter,i,no_constraints);
   wsol = w;
   b = w(3);
   fval=cvx_optval
   end
       
      %    fprintf('iter %d: Current Solution w=[%.4f, %.4f]  b=%.4f fval=%.4f\n',iter,w,b,fval);
      
       
         end
      end
          
  
    
     
end
 
% test: predict on held-out data with the learned weights and report
% accuracy / error rate.
[ytest Xtest] = svmlread('example4/test.dat');
Xtest = full(Xtest);
mtest = size(Xtest,1);
Xtest(:,54) = ones(mtest,1); %for bias wb
ypred = zeros(mtest,1);
for i = 1:mtest
    ypred(i) = SSVM_predInference(wsol, Xtest(i,:)', K);
end
total_correct = sum(ypred == ytest);
accuracy = 100*total_correct/mtest;
errorrate = 100-accuracy;
% BUG FIX: the original format string had no conversion spec for fval,
% so MATLAB recycled the format and printed a garbled second line.
fprintf('test %d samples, correctly classified %d samples, accuracy %.4f%%, error %.4f%%, obj value %.4f\n',mtest,total_correct,accuracy,errorrate,fval);
% %.4f instead of %d: norm(wsol) is floating point, %d would print it in
% scientific notation.
fprintf('norm solution w=%.4f\n',norm(wsol));
%BELOW are the correct results from Joachims' SVMstruct.
%Compare with below: if the accuracy and optimization objective value are the same, then we declare the implementation "Correct".



% Iter 20 (300 active): ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++(NumConst=804, SV=279, CEps=0.0000, QPEps=0.0493)
% Final epsilon on KKT-Conditions: 0.10000
% Upper bound on duality gap: 8.78339
% Dual objective value: dval=336978.31022
% Total number of constraints in final working set: 804 (of 804)
% Number of iterations: 20
% Number of calls to 'find_most_violated_constraint': 6000
% Number of SV: 279 
% Number of non-zero slack variables: 209 (out of 300)
% Norm of weight vector: |w|=252.00722
% Norm. sum of slack variables (on working set): sum(xi_i)/n=61.04665
% Norm of longest difference vector: ||Psi(x,y)-Psi(x,ybar)||=28.06128
% Runtime in cpu-seconds: 7.48 (99.19% for QP, 0.19% for Argmax, 0.10% for Psi, 0.00% for init)
% Compacting linear model...done
% Writing learned model...done
% peerajak@wijai1 /working/peerajak/ChulaQE/Semister6/Implementation/0_MyFastSSVM/standard_multiclass_ssvm $ ./classify.sh 
% Reading model...done.
% Reading test examples... (2000 examples) done.
% Classifying test examples...done
% Runtime (without IO) in cpu-seconds: 0.00
% Average loss on test set: 32.5500
% Zero/one-error on test set: 32.55% (1349 correct, 651 incorrect, 2000 total)

