package fr.ups.jim.rdf.tp4;

/**
 * @author Ji MA (ji.ma@u-psud.fr)
 * @author Antoine Sylvain (antoinesyl@gmail.com)
 * @version 1.0a
 */

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;

import org.omg.CORBA.TRANSACTION_MODE;

public class Perceptron {

	/**
	 * A single labelled example: a feature vector (the observation, to which
	 * the corpus readers append a trailing 1.0 bias term) together with its
	 * class label (+1 or -1).
	 */
	static class Example {
		/** Feature values; the last element is the constant 1.0 bias input. */
		public final List<Double> observation;

		/** Class label, +1 or -1. */
		public final Integer label;

		public Example(List<Double> obser, Integer label) {
			this.observation = obser;
			this.label = label;
		}

		@Override
		public String toString() {
			return String.valueOf(this.label);
		}
	}

	/**
	 * Reads the spambase corpus: each line is a comma-separated list of
	 * numeric feature values followed by a 0/1 class label. A constant 1.0
	 * bias term is appended to every observation, and a label of 0 is
	 * remapped to -1 so that labels are always +1/-1.
	 *
	 * @param fileName
	 *            the filename of the corpus
	 * @return a list of parsed examples
	 * @throws FileNotFoundException
	 *             if the file does not exist
	 * @throws IOException
	 *             on any other read error
	 */
	public static List<Example> readSpamData(String fileName)
			throws IOException, FileNotFoundException {

		List<Example> res = new ArrayList<Example>();
		BufferedReader br = new BufferedReader(new InputStreamReader(
				new FileInputStream(fileName)));
		// close the reader even when parsing fails: the original leaked the
		// stream because br.close() was unreachable after an exception
		try {
			String line;
			while ((line = br.readLine()) != null) {
				String[] el = line.split(",");
				List<Double> obser = new ArrayList<Double>();
				for (int i = 0; i < el.length - 1; i++) {
					obser.add(Double.parseDouble(el[i]));
				}
				obser.add(1.0); // bias input

				Integer label = Integer.parseInt(el[el.length - 1]);
				if (label == 0) {
					label = -1; // normalise labels to +1/-1
				}
				res.add(new Example(obser, label));
			}
		} finally {
			br.close();
		}

		return res;
	}

	/**
	 * Reads the house-votes corpus and builds a list of examples in which
	 * every example consists of two fields: <br>
	 * <li>a Double list of length 17 representing 16 votes ("y" is denoted
	 * as 1.0, "n" as 2.0, and 3.0 stands for any other token such as the
	 * unknown value "?") plus a trailing 1.0 bias term; <br>
	 * <li>an Integer label for the classification of the example: 1 stands
	 * for "democrat", -1 for anything else (i.e. "republican").
	 *
	 * @param fileName
	 *            the filename of the corpus
	 * @return a list of parsed examples
	 * @throws FileNotFoundException
	 *             if the file does not exist
	 * @throws IOException
	 *             on any other read error
	 */
	public static List<Example> readDeputyData(String fileName)
			throws IOException, FileNotFoundException {

		List<Example> res = new ArrayList<Example>();
		BufferedReader br = new BufferedReader(new InputStreamReader(
				new FileInputStream(fileName)));
		// close the reader even when parsing fails: the original leaked the
		// stream because br.close() was unreachable after an exception
		try {
			String line;
			while ((line = br.readLine()) != null) {
				String[] el = line.split(",");
				List<Double> obser = new ArrayList<Double>();
				// el[0] is the party name; the votes start at index 1
				for (int i = 1; i < el.length; i++) {
					if ("y".equals(el[i])) {
						obser.add(1.0);
					} else if ("n".equals(el[i])) {
						obser.add(2.0);
					} else {
						obser.add(3.0); // unknown vote "?"
					}
				}
				obser.add(1.0); // bias input

				Integer label = "democrat".equals(el[0]) ? 1 : -1;
				res.add(new Example(obser, label));
			}
		} finally {
			br.close();
		}

		return res;
	}

	/**
	 * Classify the given example (observation) based on the provided
	 * parameter vector: the sign of the dot product decides the class.
	 * 
	 * @param observation
	 *            feature values of the example to classify
	 * @param parameter
	 *            a vector of parameters
	 * @return 1 (democrat) when the dot product is non-negative, -1
	 *         (republican) otherwise
	 */
	public static int classify(List<Double> observation, Vector parameter) {
		double score = new Vector(observation).dot(parameter);
		return (score >= 0) ? 1 : -1;
	}

	/**
	 * Calculates the error rate of the given perceptron (parameters) for a
	 * given corpus: the fraction of examples whose predicted class differs
	 * from their label.
	 * 
	 * @param corpus
	 *            corpus to be tested
	 * @param parameters
	 *            given vector of parameters
	 * @return the number of misclassified examples divided by the corpus size
	 */
	public static double test(List<Example> corpus, Vector parameters) {
		int mistakes = 0;

		for (Example example : corpus) {
			int predicted = classify(example.observation, parameters);
			if (!example.label.equals(predicted)) {
				mistakes++;
			}
		}

		return mistakes / (double) corpus.size();
	}

	/**
	 * Helper for the "learn" methods: determines whether the given
	 * parameters misclassify at least one example of the corpus.
	 * 
	 * @param corpus
	 *            corpus to be tested
	 * @param parameters
	 *            given vector of parameters
	 * @return true on the first classification error found; false otherwise
	 */
	public static boolean hasErrors(List<Example> corpus, Vector parameters) {
		for (Example example : corpus) {
			if (!example.label.equals(classify(example.observation, parameters))) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Perceptron learning: repeatedly scans the training corpus and, on the
	 * first misclassified example found, applies a correction to the
	 * parameter vector. Stops after iterLimit corrections, or as soon as a
	 * full pass over the corpus finds no error (perfect separation).
	 * 
	 * @param trainingCorpus
	 *            training corpus
	 * @param iterLimit
	 *            user-defined upper-limit for fixing perceptron
	 * @param lambda
	 *            learning rate
	 * @return calculated perceptron represented in a Vector
	 */
	public static Vector learn(List<Example> trainingCorpus, int iterLimit,
			double lambda) {
		// construct vector of parameters(theta) with proper size
		// all parameters are initialized as 0
		int vectorSize = trainingCorpus.get(0).observation.size();
		Vector parameters = new Vector(vectorSize);

		int iterTime = 0;
		while (iterTime < iterLimit) {
			boolean corrected = false;
			for (Example ex : trainingCorpus) {
				if (!ex.label.equals(classify(ex.observation, parameters))) {
					iterTime++;
					corrected = true;
					// NOTE(review): this combines x, the label and lambda
					// through Vector.add — presumably implementing
					// theta += lambda * y * x; confirm against the Vector class
					Vector x = new Vector(ex.observation);
					parameters = parameters.add((x.add(ex.label.doubleValue()))
							.add(lambda));
					break;
				}
			}
			// BUGFIX: the original looped forever when the corpus became
			// perfectly classified before iterLimit corrections were made,
			// because iterTime was then never incremented again
			if (!corrected) {
				break;
			}
		}

		return parameters;
	}

	/**
	 * Perceptron learning driven by the variance of recent error rates: the
	 * parameter vector keeps being corrected until the variance of the last
	 * errorRatesNum error rates (each measured on the prefix of the corpus
	 * scanned so far) falls below the given threshold, or until a full pass
	 * finds no misclassified example.
	 * 
	 * @param trainingCorpus
	 *            training corpus
	 * @param errorRatesNum
	 *            number of preceding error rates to be considered
	 * @param threshold
	 *            threshold for the variance
	 * @param lambda
	 *            learning rate
	 * @return calculated perceptron represented in a Vector
	 */
	public static Vector learnWithVariance(List<Example> trainingCorpus,
			int errorRatesNum, double threshold, double lambda) {
		// construct vector of parameters(theta) with proper size
		// all parameters are initialised as 0
		int vectorSize = trainingCorpus.get(0).observation.size();
		Vector parameters = new Vector(vectorSize);

		// sliding window of the most recent error rates
		List<Double> errorRatesP = new ArrayList<Double>();
		double variance = 1.0;

		while (variance >= threshold) {
			boolean corrected = false;
			for (int i = 0; i < trainingCorpus.size(); i++) {
				Example ex = trainingCorpus.get(i);
				if (!ex.label.equals(classify(ex.observation, parameters))) {
					corrected = true;

					// error rate on the development corpus, i.e. the prefix
					// of the training corpus scanned so far
					List<Example> developmentCorpus = trainingCorpus.subList(0, i + 1);
					double tempErrorRate = test(developmentCorpus, parameters);

					// keep only the last errorRatesNum error rates
					// (autoboxing replaces the deprecated new Double(...))
					if (errorRatesP.size() >= errorRatesNum) {
						errorRatesP.remove(0);
					}
					errorRatesP.add(tempErrorRate);

					// calculate variance of the window
					variance = calculateVariance(errorRatesP);

					// perceptron correction
					// NOTE(review): presumably theta += lambda * y * x via
					// Vector.add — confirm against the Vector class
					Vector x = new Vector(ex.observation);
					parameters = parameters.add((x.add(ex.label.doubleValue()))
							.add(lambda));

					// ensure the learning process won't stop too early:
					// keep scanning until the error-rate window is full
					if (errorRatesP.size() < errorRatesNum)
						continue;
					else
						break;
				}
			}
			// BUGFIX: without this check the loop spun forever once the
			// corpus was perfectly classified while the variance was still
			// above the threshold (variance was never updated again)
			if (!corrected) {
				break;
			}
		}

		return parameters;
	}

	/**
	 * Calculates the (population) variance of a given double-value list: the
	 * mean of the squared deviations from the mean.
	 * 
	 * BUGFIX: the original returned the raw sum of squared deviations
	 * without dividing by the number of samples, which is not a variance and
	 * made the effective threshold in learnWithVariance scale with
	 * errorRatesNum.
	 * 
	 * @param errorRates
	 *            a list consisting of a number of error rates
	 * @return the variance of all given error rates, 0.0 for an empty list
	 */
	public static double calculateVariance(List<Double> errorRates) {
		int n = errorRates.size();
		if (n == 0) {
			return 0.0;
		}

		double sum = 0.0;
		for (Double d : errorRates) {
			sum += d.doubleValue();
		}
		double avg = sum / n;

		double squaredDeviations = 0.0;
		for (Double d : errorRates) {
			double diff = d.doubleValue() - avg;
			squaredDeviations += diff * diff;
		}

		return squaredDeviations / n;
	}

	/**
	 * Entry point: loads the deputy (house-votes) corpus, shuffles it, and
	 * splits it into a 100-example test corpus and a training corpus. The
	 * commented-out sections are the experiments for the successive
	 * questions of the assignment; uncomment the relevant one to run it.
	 */
	public static void main(String[] v) throws IOException,
			FileNotFoundException {

		List<Example> data = readDeputyData("house-votes-84.data");
//		List<Example> data2 = readSpamData("spambase.data");

		/*
		 * Shuffle the order of examples in corpus The first 100 examples are
		 * used for test Other remaining examples are used for learning
		 */
		Collections.shuffle(data);
//		Collections.shuffle(data2);

		/* divide the corpus into test corpus and training corpus */
		List<Example> testCorpus = data.subList(0, 100);
		List<Example> trainingCorpus = data.subList(100, data.size());

//		List<Example> testCorpus2 = data2.subList(0, 100);
//		List<Example> trainingCorpus2 = data2.subList(100, data2.size());

		/* Question 5 */
//		Vector v1 = new Vector("25 -12 67 -104 -43 46 -18 -10 45 -33 54 -39 "
//				+ "43 -19 5 -2 55");
//		System.out.println(test(data, v1));


		/* Question 7 */
//		System.out.println("iteration limit\tError rate on test corpus\tError "
//				+ "rate on learning corpus");
//		for (int i = 1; i <= 50; i++) {
//			System.out.println(i + "\t"
//					+ test(testCorpus, learn(trainingCorpus, i, 1.0)) + "\t"
//					+ test(trainingCorpus, learn(trainingCorpus, i, 1.0)));
//		}
//
//		System.out.println("iteration limit\tError rate on test corpus\tError "
//				+ "rate on learning corpus");
//		for (int i = 1; i <= 50; i++) {
//			System.out.println(i + "\t"
//					+ test(testCorpus2, learn(trainingCorpus2, i, 1.0)) + "\t"
//					+ test(trainingCorpus2, learn(trainingCorpus2, i, 1.0)));
//		}

		
		/* Question 10 : test for method learnWithVariance */
//		double avg = 0.0;
//		for (int i = 0; i < 50; i++) {
//			Vector v1 = learnWithVariance(trainingCorpus, 5, 0.00001, 1.0);
//			avg += test(testCorpus, v1);
//		}
//		System.out.println(avg / 50);
		
//		System.out.println("iteration limit\tError rate on test corpus");
//		for (int i = 1; i <= 500; i++) {
//			System.out.println(i + "\t"
//			+ test(testCorpus, learn(trainingCorpus, i, 1.0)));
//		}

		/* Question 11 */
//		double avg = 0.0;
//		for (int i = 0; i < 50; i++) {
//			Vector v1 = learnWithVariance(trainingCorpus2, 10, 0.00000001, 1.0);
//			avg += test(testCorpus2, v1);
//		}
//		System.out.println(avg / 50);	
		
//		System.out.println("iteration limit\tError rate on test corpus");
//		for (int i = 1; i <= 500; i++) {
//			System.out.println(i + "\t"
//			+ test(testCorpus2, learn(trainingCorpus2, i, 1.0)));
//		}
		
		
		/*
		 * Question 12 : test the convergence speed for different values of
		 * alpha (the learning rate)
		 */
//		System.out.println("iteration limit\tError rate on test corpus");
//		for (int i = 10000; i <= 10020; i++) {
//			System.out.println(i + "\t"
//					+ test(testCorpus, learn(trainingCorpus, i, 1.0)));
//		}
//
//		System.out.println("iteration limit\tError rate on test corpus");
//		for (int i = 10000; i <= 10020; i++) {
//			System.out.println(i + "\t"
//					+ test(testCorpus2, learn(trainingCorpus2, i, 1.0)));
//		}

	}
}
