/*
 * Project Name: 26Nov2011-NSI_Assignment-0 
 * Package Name: net
 * File Name: FNet.java
 * Primary Type Name: FNet
 * 
 * Created or edited by Cheng Ye on Nov 26, 2011
 */
package net;

import org.apache.log4j.Logger;
import org.encog.Encog;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.Propagation;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.networks.training.propagation.manhattan.ManhattanPropagation;
import org.encog.neural.networks.training.propagation.quick.QuickPropagation;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

import data.TrainingDataSet;
import file.NetworkFileHandler;

/**
 * Feed-forward neural network wrapper around Encog's {@link BasicNetwork}.
 * Builds a new 17-x-35-x-35-x-17 network (or reloads a previously saved one),
 * lets the caller pick a propagation training strategy, and runs training or
 * testing with results written to the log.
 */
public class FNet {

	/** The feed-forward network under training/testing. */
	private BasicNetwork fNet;

	/** The data set used for training or testing. */
	private TrainingDataSet trainingData;

	/** The currently selected propagation training strategy. */
	private Propagation training;

	/** Loads and saves the network state file. */
	private NetworkFileHandler oldNet;

	/** When true, skip loading a saved network and build a fresh one. */
	private boolean reset;

	/** Number of neurons in the input layer. */
	public static final int INPUT_NEURON_NUM = 17;

	/** Number of neurons in the output layer. */
	public static final int OUTPUT_NEURON_NUM = 17;

	/** Number of neurons in each hidden layer (configurable per instance). */
	private int HIDDEN_LAYER_NEURON_NUM = 35;

	/** Number of hidden layers. */
	private final int HIDDEN_LAYER_NUM = 2;

	/** Learning rate used by the strategies that take one. */
	private double LEARNING_RATE = 0.0001;

	/** Momentum term used by backpropagation. */
	private double MOMENTUM = 0.8;

	/** Maximum number of training rounds; 0 means train until the error limit. */
	private int trainingLimit = 1;

	/** Training stops once the error drops to or below this value. */
	private double errLimit = 0.00000001;

	/** Initial weight assigned to every connection of a newly built network. */
	private double defaultWeight = -0.1;

	/** Class logger. */
	private static final Logger logger = Logger.getLogger(FNet.class);

	/**
	 * Creates a network, reusing a previously saved state file if one exists.
	 */
	public FNet() {
		init();
	}

	/**
	 * Creates a network with custom training parameters. Out-of-range values
	 * are silently ignored and the defaults kept.
	 * 
	 * @param l
	 *            learning rate, applied only when 0 &lt; l &lt; 1
	 * @param m
	 *            momentum, applied only when 0 &lt; m &lt; 1
	 * @param n
	 *            hidden-layer neuron count, applied only when positive; a
	 *            custom count forces a fresh network instead of loading a
	 *            saved one (the saved topology would not match)
	 */
	public FNet(double l, double m, int n) {
		if (l > 0 && l < 1)
			LEARNING_RATE = l;
		if (m > 0 && m < 1)
			MOMENTUM = m;
		if (n > 0) {
			HIDDEN_LAYER_NEURON_NUM = n;
			// A different topology cannot reuse previously saved weights.
			reset = true;
		}
		init();
	}

	/**
	 * Loads the saved network when allowed; otherwise builds a new
	 * input/hidden/hidden/output network, initializes every connection to
	 * {@code defaultWeight}, and then hand-wires the connections required for
	 * problem 2.1.
	 */
	private void init() {

		oldNet = new NetworkFileHandler();
		if (!reset) {
			fNet = oldNet.loadNetwork(null);
			if (fNet != null)
				logger.info("Network state file is successfully loaded, the network has "
						+ fNet.getLayerNeuronCount(1)
						+ " neurons in hidden layer");
		}
		if (fNet == null) {
			logger.info("No previous network state file loaded, initialize a new network with "
					+ HIDDEN_LAYER_NEURON_NUM + " neurons in hidden layer");
			// Create input layer (biased, no activation function).
			fNet = new BasicNetwork();
			fNet.addLayer(new BasicLayer(null, true, INPUT_NEURON_NUM));

			// Create hidden layers (sigmoid, biased).
			for (int i = 1; i <= HIDDEN_LAYER_NUM; i++) {
				fNet.addLayer(new BasicLayer(new ActivationSigmoid(), true,
						HIDDEN_LAYER_NEURON_NUM));
			}

			// Create output layer (sigmoid, no bias).
			fNet.addLayer(new BasicLayer(new ActivationSigmoid(), false,
					OUTPUT_NEURON_NUM));

			fNet.getStructure().finalizeStructure();

			// Start every connection from the same small negative weight so
			// the hand-wired 1.0 connections below stand out.
			for (int i = 0; i < fNet.getLayerCount() - 1; i++) {
				for (int j = 0; j < fNet.getLayerNeuronCount(i); j++) {
					for (int k = 0; k < fNet.getLayerNeuronCount(i + 1); k++) {
						setWeight(i, j, k, defaultWeight);
					}
				}
			}

			// Hand-wired connections for problem 2.1.
			setWeight(0, 0, 0, 1);
			setWeight(1, 0, 0, 1);
			setWeight(2, 0, 6, 1);

			setWeight(0, 1, 1, 1);
			setWeight(0, 2, 1, -1);
			setWeight(1, 1, 1, 1);
			setWeight(2, 1, 8, 1);

			setWeight(0, 3, 2, 1);
			setWeight(1, 2, 2, 1);
			setWeight(2, 2, 4, 1);

			setWeight(0, 7, 3, 1);
			setWeight(0, 10, 3, 1);
			setWeight(1, 3, 3, 1);
			setWeight(2, 3, 14, 1);

			setWeight(0, 7, 4, 1);
			setWeight(0, 11, 4, 1);
			setWeight(1, 4, 4, 1);
			setWeight(2, 4, 15, 1);

			logger.info("Finish constructing a new network");

			// Alternative hand-wired connections for problem 2.2 (disabled).
//			setWeight(0, 3, 5, 1);
//			setWeight(1, 5, 5, 1);
//			setWeight(2, 5, 9, 1);
//
//			setWeight(0, 3, 6, 1);
//			setWeight(0, 4, 6, 1);
//			setWeight(1, 6, 6, 1);
//			setWeight(2, 6, 10, 1);
//
//			setWeight(0, 7, 7, 1);
//			setWeight(0, 9, 7, 1);
//			setWeight(1, 7, 7, 1);
//			setWeight(2, 7, 14, 1);
//
//			setWeight(0, 8, 8, 1);
//			setWeight(0, 10, 8, 1);
//			setWeight(1, 8, 8, 1);
//			setWeight(2, 8, 14, 1);
//
//			setWeight(2, 8, 15, 1);
		}
	}

	/**
	 * Sets the maximum number of training rounds.
	 * 
	 * @param limit
	 *            the new round limit; 0 means train until the error limit
	 */
	public void setTrainingLimit(int limit) {
		trainingLimit = limit;
	}

	/**
	 * Selects standard backpropagation as the training strategy.
	 * 
	 * @param trainingData
	 *            the data set to train on
	 */
	public void setupBTraining(TrainingDataSet trainingData) {
		logger.info("Set training strategy to Backpropagation, use "
				+ LEARNING_RATE + " as learning rate, " + MOMENTUM
				+ " as momentum");
		this.trainingData = trainingData;
		training = new Backpropagation(fNet, this.trainingData.getDataSet(),
				LEARNING_RATE, MOMENTUM);
	}

	/**
	 * Installs the data set to be used by {@link #test()} and
	 * {@link #verboseTest()}.
	 * 
	 * @param trainingData
	 *            the data set to test against
	 */
	public void setupTest(TrainingDataSet trainingData) {
		this.trainingData = trainingData;
	}

	/**
	 * Selects resilient propagation (RPROP) as the training strategy.
	 * 
	 * @param trainingData
	 *            the data set to train on
	 */
	public void setupRTraining(TrainingDataSet trainingData) {
		logger.info("Set training strategy to Resilient Propagation");
		this.trainingData = trainingData;
		training = new ResilientPropagation(fNet,
				this.trainingData.getDataSet());
	}

	/**
	 * Selects quick propagation as the training strategy.
	 * 
	 * @param trainingData
	 *            the data set to train on
	 */
	public void setupQTraining(TrainingDataSet trainingData) {
		logger.info("Set training strategy to Quick Propagation, use "
				+ LEARNING_RATE + " as learning rate");
		this.trainingData = trainingData;
		training = new QuickPropagation(fNet, this.trainingData.getDataSet(),
				LEARNING_RATE);
	}

	/**
	 * Selects Manhattan update propagation as the training strategy.
	 * 
	 * @param trainingData
	 *            the data set to train on
	 */
	public void setupMTraining(TrainingDataSet trainingData) {
		logger.info("Set training strategy to Manhattan Update Propagation, use "
				+ LEARNING_RATE + " as learning rate");
		this.trainingData = trainingData;
		training = new ManhattanPropagation(fNet,
				this.trainingData.getDataSet(), LEARNING_RATE);
	}

	/**
	 * Trains the network without per-round logging until the error limit or
	 * the round limit is reached, then saves the network state and shuts Encog
	 * down. If no training strategy or data set has been configured, only the
	 * save/shutdown step runs (the previous revision had the whole training
	 * loop commented out, so the method saved without training at all).
	 */
	public void train() {
		if (training != null && trainingData != null
				&& trainingData.getDataSetSize() > 0) {
			logger.info("Start training");
			logger.info("There are " + trainingData.getDataSetSize()
					+ " training samples");
			logger.info("Error limit is set to " + errLimit
					+ ", Maximum rounds is set to " + trainingLimit);
			long startTime = System.currentTimeMillis();
			long epoch = 0;
			do {
				training.iteration();
				epoch++;
			} while ((training.getError() > errLimit && training.getIteration() < trainingLimit)
					|| (training.getError() > errLimit && trainingLimit == 0));
			logger.info("Training is finished in " + epoch + " rounds");
			logger.info("The error rate for the training is "
					+ fNet.calculateError(trainingData.getDataSet()));
			logger.info("Time consumed by the training is "
					+ (System.currentTimeMillis() - startTime) + "ms");
		}
		oldNet.saveNetwork(fNet);
		Encog.getInstance().shutdown();
	}

	/**
	 * Trains the network with per-round logging, reports every sample's
	 * input/ideal/actual vectors, then saves the network state and shuts
	 * Encog down.
	 */
	public void verboseTrain() {
		if (trainingData.getDataSetSize() <= 0) {
			return;
		}
		logger.info("Start training.");
		logger.info("There are " + trainingData.getDataSetSize()
				+ " training samples");
		logger.info("Error limit is set to " + errLimit
				+ ", Maximum rounds is set to " + trainingLimit);
		long startTime = System.currentTimeMillis();
		long epoch = 0;
		logger.info("The result for training round 0:");
		do {
			training.iteration();
			epoch++;
			logger.info("Current error rate is " + training.getError());
			logger.info("The result for training round "
					+ training.getIteration() + ":");
		} while ((training.getError() > errLimit && training.getIteration() < trainingLimit)
				|| (training.getError() > errLimit && trainingLimit == 0));
		logger.info("Current error rate is "
				+ fNet.calculateError(trainingData.getDataSet()));
		logger.info("Training is finished in " + epoch + " rounds");
		long stopTime = System.currentTimeMillis();
		int counter = reportSamples("training");
		logger.info("There are " + counter + " out of "
				+ trainingData.getDataSetSize()
				+ " training samples are well classified");
		logger.info("The error rate for the training is "
				+ fNet.calculateError(trainingData.getDataSet()));
		logger.info("Time consumed by the training is "
				+ (stopTime - startTime) + "ms");
		oldNet.saveNetwork(fNet);
		Encog.getInstance().shutdown();
	}

	/**
	 * Runs the network over the configured data set and logs every sample's
	 * input/ideal/actual vectors plus the overall classification counts.
	 */
	public void verboseTest() {
		if (trainingData.getDataSetSize() <= 0) {
			return;
		}
		logger.info("There are " + trainingData.getDataSetSize()
				+ " test samples");
		int counter = reportSamples("test");
		logger.info("Test is finished");
		logger.info("There are " + counter + " out of "
				+ trainingData.getDataSetSize()
				+ " test samples are well classified");
		logger.info("The error rate for the test is "
				+ fNet.calculateError(trainingData.getDataSet()));
	}

	/**
	 * Logs the input, ideal output, and rounded actual output of every sample
	 * in the configured data set, and counts how many samples the network
	 * classifies correctly (rounded output equal to the ideal output).
	 * Shared by {@link #verboseTrain()} and {@link #verboseTest()}.
	 * 
	 * @param sampleKind
	 *            label used in the per-sample log line ("training" or "test")
	 * @return the number of correctly classified samples
	 */
	private int reportSamples(String sampleKind) {
		int counter = 0;
		for (int i = 0; i < trainingData.getDataSetSize(); i++) {
			logger.info("The result for " + sampleKind + " sample " + (i + 1)
					+ ":");

			StringBuilder line = new StringBuilder("The input is \t\t[ ");
			for (double d : trainingData.getSingleInput(i)) {
				line.append((int) d).append(" ");
			}
			logger.info(line.append("]").toString());

			line = new StringBuilder("The ideal output is \t[ ");
			StringBuilder ideal = new StringBuilder();
			for (double d : trainingData.getSingleIdeal(i)) {
				line.append((int) d).append(" ");
				ideal.append((int) d);
			}
			logger.info(line.append("]").toString());

			line = new StringBuilder("The (rounded)output is \t[ ");
			StringBuilder actual = new StringBuilder();
			double[] output = fNet.compute(
					trainingData.getDataSet().getData().get(i).getInput())
					.getData();
			for (double d : output) {
				line.append(Math.round(d)).append(" ");
				actual.append(Math.round(d));
			}
			line.append("]");
			if (ideal.toString().equals(actual.toString())) {
				counter++;
			}
			logger.info(line.toString());
		}
		return counter;
	}

	/**
	 * Runs the network over the configured data set and logs only the overall
	 * error rate.
	 */
	public void test() {
		if (trainingData.getDataSetSize() <= 0) {
			return;
		}
		logger.info("There are " + trainingData.getDataSetSize()
				+ " test samples");
		logger.info("Test is finished");
		logger.info("The error rate for the test is "
				+ fNet.calculateError(trainingData.getDataSet()));
	}

	/**
	 * Sets the error limit below which training stops.
	 * 
	 * @param errLimit
	 *            the new error limit
	 */
	public void setErrLimit(double errLimit) {
		this.errLimit = errLimit;
	}

	/**
	 * Sets the weight of one connection, logging the outcome. Invalid indices
	 * are reported but never thrown: Encog raises runtime exceptions for
	 * out-of-range layer/neuron indices, which are caught and logged so a bad
	 * hand-wired weight does not abort construction.
	 * 
	 * @param layer
	 *            index of the source layer
	 * @param from
	 *            index of the source neuron within {@code layer}
	 * @param to
	 *            index of the target neuron in the next layer
	 * @param weight
	 *            the weight to assign
	 */
	public void setWeight(int layer, int from, int to, double weight) {
		if (fNet == null) {
			logger.error("Cannot set the weight of the connection from the neuron "
					+ from
					+ " in layer "
					+ layer
					+ " to neuron "
					+ to
					+ " on next layer: network doesn't exist");
		} else {
			try {
				fNet.setWeight(layer, from, to, weight);
				logger.info("The weight of the connection from the neuron "
						+ from + " in layer " + layer + " to neuron " + to
						+ " on next layer is set to " + weight);
			} catch (Exception e) {
				logger.error("Cannot set the weight of the connection from the neuron "
						+ from
						+ " in layer "
						+ layer
						+ " to neuron "
						+ to
						+ " on next layer: " + e.getMessage());
			}
		}
	}
}