% Load precomputed data (dataCell, labelVec, ... saved below) from disk.
load('allData.mat')

test_ratio = 0.2; % fraction of the data held out for TESTING; the rest is used for training
num_split = 10;   % number of independent random train/test splits
numBins = [8 8 8]; % histogram bins per color channel for colorPyramid
numLevels = 3;     % number of spatial pyramid levels for colorPyramid
outFolder = './result';
% Check specifically for a directory: a bare exist(outFolder) matches ANY
% name (variable, file, function) and could wrongly skip the mkdir.
if ~exist(outFolder,'dir')
	mkdir(outFolder)
end

addpath('../liblinear-1.93/matlab/');

% Human-readable names for the 10 car color classes; label value k (0-9)
% maps to cateName{k+1}.
% NOTE: cell(10) would allocate a 10x10 cell array; cell(10,1) is the
% intended 10-element column vector.
cateName = cell(10,1);
cateName{1} = 'White';
cateName{2} = 'Black';
cateName{3} = 'Red';
cateName{4} = 'Yellow';
cateName{5} = 'Dark Blue';
cateName{6} = 'Green';
cateName{7} = 'Gray';
cateName{8} = 'Silver';
cateName{9} = 'Light Blue';
cateName{10}= 'Champion';

%{

dataPath = '../CarColorSamples';
labelVec=[];
dataCell = cell(0);
iSample= 1;
for iColor = 0:9
    color_path = fullfile(dataPath, num2str(iColor));
    fileList = dir(fullfile(color_path,'*.jpg'));
    for iFile= 1:length(fileList);
    labelVec= [labelVec iColor];
    dataCell{iSample}=imresize(imread(fullfile(color_path,fileList(iFile).name)),[NaN,200]);
    iSample = iSample+ 1;
    end
end
%}

% Convert every sample image from RGB to HSV color space.
% (Earlier Lab-space experiments are kept below, commented out.)
numImage = length(labelVec);
%kernel = makecform('srgb2lab');
%colorCell = reshape(cellfun(@(im) applycform(im,kernel), dataCell, 'UniformOutput', false), [], 1);
%colorCell = reshape(cellfun(@RGB2Lab, dataCell, 'UniformOutput', false), [], 1);
colorCell = reshape(cellfun(@rgb2hsv, dataCell, 'UniformOutput', false), [], 1);

% Build num_split random train/test assignments: entry (s,i) is 1 when
% image i is held out for testing in split s, and 0 when it is used for
% training.
numTest = round(numImage*test_ratio);
assignmentMatrix = zeros(num_split,numImage);
for s = 1:num_split
    heldOut = randperm(numImage,numTest);
    assignmentMatrix(s,heldOut) = 1;
end

% Compute a color-pyramid feature vector for every image.
% The first image's feature is computed once to discover the feature length
% for preallocation, then reused as row 1 — the original recomputed it a
% second time inside the loop.
% Channels are passed in reversed order (V,S,H) to colorPyramid.
firstImg = colorCell{1};
feature = colorPyramid(firstImg(:,:,3),firstImg(:,:,2),firstImg(:,:,1),numBins,numLevels);
featureMat = zeros(numImage,length(feature));
featureMat(1,:) = feature;
for iSample = 2:numImage
    img = colorCell{iSample};
    featureMat(iSample,:) = colorPyramid(img(:,:,3),img(:,:,2),img(:,:,1),numBins,numLevels);
end

%save('allData.mat','dataCell','labelVec','colorCell');
%save('allData.mat')


%% run all kinds of regression using liblinear
%{
-s type : set type of solver (default 1)
  for multi-class classification
	 0 -- L2-regularized logistic regression (primal)
	 1 -- L2-regularized L2-loss support vector classification (dual)
	 2 -- L2-regularized L2-loss support vector classification (primal)
	 3 -- L2-regularized L1-loss support vector classification (dual)
	 4 -- support vector classification by Crammer and Singer
	 5 -- L1-regularized L2-loss support vector classification
	 6 -- L1-regularized logistic regression
	 7 -- L2-regularized logistic regression (dual)
  for regression
	11 -- L2-regularized L2-loss support vector regression (primal)
	12 -- L2-regularized L2-loss support vector regression (dual)
	13 -- L2-regularized L1-loss support vector regression (dual)
-c cost : set the parameter C (default 1)
-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)
-e epsilon : set tolerance of termination criterion
	-s 0 and 2
		|f'(w)|_2 <= eps*min(pos,neg)/l*|f'(w0)|_2,
		where f is the primal function and pos/neg are # of
		positive/negative data (default 0.01)
	-s 11
		|f'(w)|_2 <= eps*|f'(w0)|_2 (default 0.001) 
	-s 1, 3, 4 and 7
		Dual maximal violation <= eps; similar to libsvm (default 0.1)
	-s 5 and 6
		|f'(w)|_inf <= eps*min(pos,neg)/l*|f'(w0)|_inf,
		where f is the primal function (default 0.01)
	-s 12 and 13
		|f'(alpha)|_1 <= eps |f'(alpha0)|,
		where f is the dual function (default 0.1)
-B bias : if bias >= 0, instance x becomes [x; bias]; if < 0, no bias term added (default -1)
-wi weight: weights adjust the parameter C of different classes (see README for details)
-v n: n-fold cross validation mode
-q : quiet mode (no outputs)
%}
% liblinear options: -s 0 = L2-regularized logistic regression (primal),
% -c 100 = cost parameter, -B +1 = append a bias term, -q = quiet.
% BUG FIX: this variable was previously named 'bLinearOptions' while the
% train() call below passed the undefined 'libLinearOptions'.
libLinearOptions = '-s 0 -c 100 -B +1 -q';
confMats = cell(num_split,1);       % per-split confusion matrices (currently unused)
average_conf = zeros(num_split,1);  % per-split classification accuracy
mean_mat = zeros(10,10);            % confusion matrix accumulated over all splits
for i_split = 1:num_split
    train_index = find(assignmentMatrix(i_split,:)==0);
    test_index = find(assignmentMatrix(i_split,:)==1);

    trainData = featureMat(train_index,:);
    testData = featureMat(test_index,:);
    % Labels are shifted from 0-9 to 1-10 for liblinear below.
    train_gt = labelVec(train_index)';
    test_gt = labelVec(test_index)';

    model = train(train_gt+1,sparse(trainData),libLinearOptions);

    % '-b 1' requests per-class probability estimates in 'prob'.
    [label,~,prob] = predict(test_gt+1,sparse(testData),model,'-b 1');
    % liblinear orders probability columns by model.Label (order of first
    % appearance in the training data); re-sort so column k is class k.
    [~,ind]=sort(model.Label);
    probMat=prob(:,ind);

    % One-hot ground-truth target matrix (classes x samples) for confusion().
    T = zeros(10,length(test_gt));
    for iSample = 1:length(test_gt)
        T(test_gt(iSample)+1,iSample)=1;
    end
    [~,confMat,~,~] = confusion(T,probMat');
    % Accuracy = trace / total of the confusion matrix.
    average_conf(i_split) = sum(diag(confMat))/sum(confMat(:));
    mean_mat = mean_mat + confMat;


    % find false classified images
    %{
    for iSample= 1:length(test_gt)
	if max(probMat(iSample,:))>probMat(iSample,test_gt(iSample)+1)
		[~, false_class] = max(probMat(iSample,:)); 
		ind_img = test_index(iSample);
		img = dataCell{ind_img};
		fileName = [ 'split_' num2str(i_split) '_image_' num2str(ind_img) '_take_' cateName{labelVec(ind_img)+1} '_as_' cateName{false_class} '.png'];
        fileName = fullfile(outFolder,fileName);
		imwrite(img,fileName); 		
    end
    end
    %}
end
mean_mat            % display accumulated confusion matrix
average_conf'       % display per-split accuracies
mean(average_conf)  % display mean accuracy over all splits

%save('allAfetrModel.mat');
%save('models.mat','models','numBins');
% visualize models
%for iModel = 1:length(models)
%    plotWeights(models(iModel).w*models(iModel).Label(1),numBins);
%end
