% Class label codes found in column 5 of the input data:
%   1004 VEG
%   1100 WIRE
%   1103 POLE
%   1200 GROUND
%   1400 FACADE
function [ predictedLabels, continuousLabels, predictedWeightVector, errors, onlineErrors] = ExponentialGradientDescent( Data, labelToClassify )
% Train a one-vs-rest linear classifier with the Exponentiated Gradient
% (EG+/-) algorithm: two positive weight vectors whose difference forms the
% effective weights, updated multiplicatively and renormalized each step.
%
% Inputs:
%   Data            - matrix; column 5 holds the class label, columns 6:end
%                     hold the feature values (one row per data point).
%   labelToClassify - label code treated as the positive (1) class.
%
% Outputs:
%   predictedLabels       - 0/1 predictions on the (unshuffled) training data.
%   continuousLabels      - raw label column of the data.
%   predictedWeightVector - final learned weight vector (positive - negative).
%   errors                - error indicators from PredictLinearClassification.
%   onlineErrors          - per-point online 0/1 error from the final pass.
    fprintf('initial setup\n');
    % Shuffle rows so online updates do not see the data in file order.
    data = Data(randperm(size(Data,1)),:);
    numFeatures = size(Data,2) - 5;  % features occupy columns 6:end
    scalingFactor = 1.0;
    [numDataPoints, cols] = size(data);
    learningRate = 1.0/sqrt(numDataPoints);
    labels = MakeLabelsBinary(data(:, 5), numDataPoints, labelToClassify);
    continuousLabels = data(:,5);
    predictedLabels = zeros(numDataPoints, 1);
    predictedWeightVector = ones(numFeatures, 1)*(1/numDataPoints);
    positiveWeightVector = ones(numFeatures, 1)*(1/(2*numDataPoints))*scalingFactor;
    negativeWeightVector = ones(numFeatures, 1)*(1/(2*numDataPoints))*scalingFactor;
    % Preallocate (was grown implicitly one element per iteration).
    onlineErrors = zeros(numDataPoints, 1);

    % BUG FIX: the feature matrix was hard-coded to 10 columns while the data
    % has numFeatures = cols-5 feature columns; also replaces the row-by-row
    % copy loop with a single slice.
    featureVectors = data(:, 6:cols);

    % Report progress roughly every 10% of a pass; round() makes the check
    % fire even when numDataPoints is not divisible by 10 (mod with a
    % non-integer divisor almost never hits zero).
    progressStep = max(1, round(numDataPoints/10));

    fprintf('Doing gradient descent ...\n');

    %Iterate over the training data 10 times
    for k = 1:10
        fprintf('iter %d\n', k);
        % Do online learning for each data point.
        for i = 1:numDataPoints

            if(mod(i, progressStep) == 0)
                fprintf('%f percent complete\n', (i/numDataPoints)*100);
            end

            %Predict a label with the current effective weights.
            predictedWeightVector = (positiveWeightVector - negativeWeightVector);
            prediction = predictedWeightVector'*(featureVectors(i,:)');

            % Threshold the continuous prediction at 0.5.
            discretePrediction = double(prediction >= 0.5);

            onlineErrors(i, 1) = (discretePrediction ~= labels(i,1));

            % Multiplicative EG update, vectorized over all features
            % (same arithmetic as the original per-feature loop).
            gradientTerm = 2*learningRate*(prediction - labels(i, 1))*scalingFactor;
            positiveWeightVector = positiveWeightVector .* exp(-gradientTerm * featureVectors(i,:)');
            negativeWeightVector = negativeWeightVector .* exp( gradientTerm * featureVectors(i,:)');

            %Make sure that the weight vector is scaled correctly.
            % The 1e-10 guards against division by zero when weights underflow.
            s = (scalingFactor/(sum(positiveWeightVector + negativeWeightVector) + 1e-10));

            % Possible that the weight vector became too small, causing NaN.
            % Fail loudly instead of pausing (the old 'pause' hung unattended runs).
            if(isnan(s))
                error('ExponentialGradientDescent:nanScale', ...
                      'NaN detected while rescaling weight vectors (iteration %d, point %d)', k, i);
            end

            % Scale the weight vectors back to total mass scalingFactor.
            positiveWeightVector = positiveWeightVector * s;
            negativeWeightVector = negativeWeightVector * s;
        end
    end

    % Final weight vector
    predictedWeightVector = (positiveWeightVector - negativeWeightVector);

    % Predict the classification on the training data for output.
    fprintf('Label classification...\n');
    [predictedLabels, continuousLabels, errors] = PredictLinearClassification(predictedWeightVector, data, labelToClassify);

end

%Takes all the labels, and converts them into the form [0, 1] where they
%are 0 if they are not the correct label, and 1 otherwise.
%Convert raw class labels into a binary indicator vector: entry i is 1 when
%labels(i,1) equals labelToClassify, and 0 otherwise.
function [ binaryLabels] = MakeLabelsBinary(labels, numDataPoints, labelToClassify)
    % Vectorized comparison over the first numDataPoints entries; the
    % logical result is cast to double so the output is a numeric 0/1 column.
    binaryLabels = double(labels(1:numDataPoints, 1) == labelToClassify);
end
