% Load the project data (expects X1train, X2train, ytrain, X1test, X2test).
% fullfile builds a platform-independent path; the original '..\' backslash
% path only resolves on Windows.
load(fullfile('..', 'dataforproject.mat'));
% Trains 7 SVMs with different kernels/hyperparameters (the 7 'weak
% learners') and combines their predictions with AdaBoost.
% NOTE(review): original header mentioned "3-fold avg" but no fold loop is
% visible in this script — presumably handled inside train_svm; verify.

% Generate train data (feature construction via project helper).
tr_data = gen_data_brute_force(X1train, X2train);
tr_label = ytrain;

% Generate test data. True test labels are unknown, so dummy labels (all
% ones) are created because svmpredict requires a label vector argument.
te_data = gen_data_brute_force(X1test, X2test);
te_label = ones(size(te_data,1),1);

% Normalize the data to [0,1]. Test data is normalized with the parameters
% computed from the training data (avoids test-set leakage).
[tr_data, norm_params] = norm_data_brute_force(tr_data);
[te_data, ~] = norm_data_brute_force(te_data, norm_params);


%% Train the first three SVM weak learners.

% Weak learner 1: polynomial kernel of degree 4 (libsvm kernel type 1).
kernel_param = 1;
C_param = 1000;
gamma_param = 0.01;
coef0_param = 1;
degree_param = 4;
model1 = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_1, ~, ~] = svmpredict(tr_label, tr_data, model1, '');
[test_results_1, ~, ~] = svmpredict(te_label, te_data, model1, '');

% Weak learner 2: RBF kernel (libsvm kernel type 2). The coef0/degree
% values carried over from learner 1 are still passed but are ignored by
% the RBF kernel.
kernel_param = 2;
C_param = 1000;
gamma_param = 0.01;
model2 = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_2, ~, ~] = svmpredict(tr_label, tr_data, model2, '');
[test_results_2, ~, ~] = svmpredict(te_label, te_data, model2, '');

% Weak learner 3: linear kernel (libsvm kernel type 0). gamma/coef0/degree
% are likewise ignored by this kernel type.
kernel_param = 0;
model3 = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_3, ~, ~] = svmpredict(tr_label, tr_data, model3, '');
[test_results_3, ~, ~] = svmpredict(te_label, te_data, model3, '');

%% Weak learners 4-7: polynomial kernels with varying hyperparameters.
% These four configurations all use the libsvm polynomial kernel with
% coef0 = 0, so those two parameters are set once up front.
kernel_param = 1;
coef0_param = 0;

% Weak learner 4: degree 2, C = 100, gamma = 0.1.
degree_param = 2;
C_param = 100;
gamma_param = 0.1;
model = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_4, ~, ~] = svmpredict(tr_label, tr_data, model, '');
[test_results_4, ~, ~] = svmpredict(te_label, te_data, model, '');

% Weak learner 5: degree 3, C = 10, gamma = 0.1.
degree_param = 3;
C_param = 10;
gamma_param = 0.1;
model = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_5, ~, ~] = svmpredict(tr_label, tr_data, model, '');
[test_results_5, ~, ~] = svmpredict(te_label, te_data, model, '');

% Weak learner 6: degree 2, C = 10000, gamma = 0.01.
degree_param = 2;
C_param = 10000;
gamma_param = 0.01;
model = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_6, ~, ~] = svmpredict(tr_label, tr_data, model, '');
[test_results_6, ~, ~] = svmpredict(te_label, te_data, model, '');

% Weak learner 7: degree 2, C = 1, gamma = 0.1.
degree_param = 2;
C_param = 1;
gamma_param = 0.1;
model = train_svm(tr_data, tr_label, te_data, te_label, kernel_param, C_param, gamma_param, coef0_param, degree_param);
[train_results_7, ~, ~] = svmpredict(tr_label, tr_data, model, '');
[test_results_7, ~, ~] = svmpredict(te_label, te_data, model, '');


%% AdaBoost over the 7 SVM weak learners.
% Stack each learner's predicted labels column-wise: the training-set
% predictions become the boosting features; the test-set predictions are
% scored by the boosted model.
dataFeatures = [train_results_1, train_results_2, train_results_3, ...
                train_results_4, train_results_5, train_results_6, ...
                train_results_7];
dataclass = tr_label;
testdata = [test_results_1, test_results_2, test_results_3, ...
            test_results_4, test_results_5, test_results_6, ...
            test_results_7];

% Fit AdaBoost (500 boosting iterations) on the stacked training
% predictions, then apply the boosted model to the stacked test
% predictions to obtain the final labels.
[~, model] = adaboost('train', dataFeatures, dataclass, 500);
ytest = adaboost('apply', testdata, model);

% Persist the final test predictions.
save('ytest.mat', 'ytest');