package com.stanford.report;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import com.stanford.data.DataUtil;
import com.stanford.data.User;
import com.stanford.ml.ClassifierMeasures;
import com.stanford.ml.naivebayes.ClassLabel;
import com.stanford.ml.naivebayes.NaiveBayes;
import com.stanford.ml.naivebayes.TrainingSet;
import com.stanford.ml.svm.Problem;
import com.stanford.ml.svm.SVM;

/**
 * Includes the utility methods for reporting the results of the
 * machine learning algorithms.
 * 
 * @author Fatih Sunor
 *
 */
public class MachineLearning {
	
	private static final String STARS = "*************************************************************\n";
	private static final String SVM = "SVM Classification Results for ";
	private static final String NAIVE = "Naive Bayes Classification Results for ";
	private static final String WITH = " with ";
	private static final String USERS = " users\n";
	private static final String TRAINED = " features are trained\n";
	private static final String ACCURACY = "The accuracy on the test data:";
	
	/**
	 * Reports the classification results from Support Vector Machine.
	 * The rated users are split in half: the first half trains the SVM,
	 * the second half measures its accuracy.
	 * 
	 * @param users the users whose ratings are classified; unrated users are filtered out
	 * @param webSite the website whose user ratings are classified
	 * @return a string that summarizes the results from Support Vector Machine
	 */
	public static String reportSVM(List<User> users, String webSite){
		users = DataUtil.filterRatedUsers(users);
		StringBuilder result = new StringBuilder("\n\n").append(STARS);
		result.append(SVM).append(webSite).append(WITH).append(users.size()).append(USERS);
		
		//User data is split into two parts, the first part to train the
		//learning algorithm and the second part to test the performance of it.
		int splitIndex = users.size()/2;
		
		//Guard: with fewer than 2 rated users there is no way to form both a
		//non-empty training set and a non-empty testing set.
		if (users.size() < 2) {
			result.append("Not enough rated users to train and test the classifier.\n");
			result.append(STARS).append("\n");
			return result.toString();
		}
		
		//Train SVM on the first half.
		SVM s = new SVM();
		Problem problem = new Problem();
		List<User> training = users.subList(0, splitIndex);
		List<Integer[]> featuresTraining = new ArrayList<Integer[]>(DataUtil.getAllFactors(training).values());
		problem.setL(featuresTraining.get(0).length);
		problem.setN(featuresTraining.size() + 1);
		problem.populateProblem(DataUtil.getFeatures(featuresTraining),
				DataUtil.getRatings(training, DataUtil.RATED_ONLY));
		s.train(problem);
		result.append(featuresTraining.size() + 1).append(TRAINED);
		
		//Test SVM on the second half. subList(splitIndex, size()) keeps every
		//remaining user; the original (splitIndex+1, size()-1) silently skipped
		//the first and last test users.
		Problem test = new Problem();
		List<User> testing = users.subList(splitIndex, users.size());
		List<Integer[]> featuresTesting = new ArrayList<Integer[]>(DataUtil.getAllFactors(testing).values());
		test.setL(featuresTesting.get(0).length);
		test.setN(featuresTesting.size() + 1);
		//Bug fix: the test problem must carry the TEST users' ratings; the
		//original mistakenly reused the training users' ratings here, which
		//made the reported accuracy meaningless.
		test.populateProblem(DataUtil.getFeatures(featuresTesting),
				DataUtil.getRatings(testing, DataUtil.RATED_ONLY));
		int[] predictions = s.test(test);
		ClassifierMeasures e = new ClassifierMeasures(test, predictions);
		result.append(ACCURACY).append(e.Accuracy()).append("\n");
		result.append(STARS).append("\n");
		return result.toString();
	}
	
	/**
	 * Reports the classification results from Naive Bayes Classifier.
	 * The rated users are split in half: the first half trains the classifier,
	 * the second half measures its accuracy.
	 * 
	 * @param users is the data to be classified; unrated users are filtered out
	 * @param webSite is the website the user rating is classified
	 * @return a string summary of the classification results.
	 */
	public static String reportNaiveBayes(List<User> users, String webSite){
		//Filter once; the original repeated this call three times with no effect.
		users = DataUtil.filterRatedUsers(users);
		StringBuilder result = new StringBuilder("\n\n").append(STARS);
		result.append(NAIVE).append(webSite).append(WITH).append(users.size()).append(USERS);
		
		//User data is split into two parts, the first part to train the
		//learning algorithm and the second part to test the performance of it.
		int splitIndex = users.size()/2;
		
		//Guard: need at least one training user and one testing user.
		if (users.size() < 2) {
			result.append("Not enough rated users to train and test the classifier.\n");
			result.append(STARS).append("\n");
			return result.toString();
		}
		
		//Naive Bayes Training
		List<User> training = users.subList(0, splitIndex);
		TrainingSet tSet = new TrainingSet(DataUtil.instanceListToArray(training));
		NaiveBayes naiveBayes = new NaiveBayes(tSet, users);
		Set<String> allTags = naiveBayes.getTagMaxMap().keySet();
		naiveBayes.trainOnAttribute("Age");
		naiveBayes.trainOnAttribute("Gender");
		naiveBayes.trainOnAttribute(allTags);
		naiveBayes.train();
		result.append(tSet.getSize()).append(TRAINED);
		
		//Test Naive Bayes on the second half (full remainder — see reportSVM note).
		List<User> testing = users.subList(splitIndex, users.size());
		int accurate = 0;
		for (User user : testing) {
			ClassLabel c = naiveBayes.classify(user);
			//parseInt yields a primitive int, so == compares values. The
			//original compared an Integer object with ==, which is a
			//reference comparison outside the -128..127 cache.
			if (Integer.parseInt(c.getName()) == user.getRating()) {
				accurate++;
			}
		}
		double accuracy = (double) accurate / testing.size();
		result.append(ACCURACY).append(accuracy).append("\n");
		result.append(STARS).append("\n");
		return result.toString();
	}
}
