import java.util.Random;

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.util.logging.EncogLogging;

import AliDiepPR.ANN.Learn;
import AliDiepPR.ANN.Network;
import AliDiepPR.ANN.NeuralNetworkException;
import AliDiepPR.ANN.ActivationFunction.SigmoidFunction;

/**
 * XOR: This example is essentially the "Hello World" of neural network
 * programming. This example shows how to construct an Encog neural network to
 * predict the output from the XOR operator. This example uses backpropagation
 * to train the neural network.
 * 
 * This example attempts to use a minimum of Encog features to create and train
 * the neural network. This allows you to see exactly what is going on. For a
 * more advanced example, that uses Encog factories, refer to the XORFactory
 * example.
 * 
 */
// NOTE(review): class name should be UpperCamelCase ("Xor"), but renaming would
// break external callers and the on-disk filename, so it is kept as-is.
public class xor {

	/**
	 * The four input rows of the XOR truth table.
	 */
	public static final double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
			{ 0.0, 1.0 }, { 1.0, 1.0 } };

	/**
	 * The ideal (expected) XOR output for each input row, in the same order.
	 */
	public static final double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 },
			{ 0.0 } };

	/** Training stops once the reported error drops below this threshold. */
	private static final double MAX_ERROR = 0.01;

	/** Backpropagation learning rate, shared by both trainers so the
	 * epoch-count comparison between the two implementations is fair. */
	private static final double LEARNING_RATE = 0.1;

	/**
	 * Trains an Encog 2-3-1 network and the AliDiepPR implementation on the
	 * XOR problem with identical settings, printing the epoch count and the
	 * per-row predictions of each so the two can be compared.
	 * 
	 * @param args
	 *            No arguments are used.
	 * @throws NeuralNetworkException
	 *             if the AliDiepPR network fails during training or testing.
	 */
	public static void main(final String args[]) throws NeuralNetworkException {

		// Build a 2-3-1 feed-forward network by hand (no factory): the input
		// layer has no activation function and no bias; the hidden and output
		// layers use sigmoid activation with bias neurons.
		BasicNetwork network = new BasicNetwork();
		network.addLayer(new BasicLayer(null, false, 2));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
		network.getStructure().finalizeStructure();
		network.reset(); // randomize the initial weights

		MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

		// Plain backpropagation (momentum 0); iterate until converged.
		final MLTrain train = new Backpropagation(network, trainingSet,
				LEARNING_RATE, 0);

		// Count completed iterations. (The original started this counter at 1
		// but the second experiment's at 0, so the printed epoch counts were
		// not comparable — both now count actual iterations.)
		int epoch = 0;
		do {
			train.iteration();
			epoch++;
		} while (train.getError() > MAX_ERROR);
		train.finishTraining(); // release any trainer-held resources

		// Show each training row next to the trained network's prediction.
		System.out.println("Neural Network Results:" + epoch);
		for (MLDataPair pair : trainingSet) {
			final MLData output = network.compute(pair.getInput());
			System.out.println(pair.getInput().getData(0) + ","
					+ pair.getInput().getData(1) + ", actual="
					+ output.getData(0) + ",ideal="
					+ pair.getIdeal().getData(0));
		}

		// Repeat the experiment with the AliDiepPR implementation using the
		// same 2-3-1 topology, learning rate, and stop criterion.
		Network net = new Network(3, new int[] { 2, 3, 1 });
		net.Randomize(new Random());
		Learn learn = new Learn(net, (float) LEARNING_RATE,
				new SigmoidFunction());
		epoch = 0;
		do {
			learn.LearnBackpropagation(XOR_INPUT, XOR_IDEAL);
			epoch++;
		} while (learn.GradientError(XOR_INPUT, XOR_IDEAL) > MAX_ERROR);

		System.out.println("Ali Results:" + epoch);
		for (int i = 0; i < XOR_INPUT.length; i++) {
			double output[] = learn.Test(net, XOR_INPUT[i]);
			System.out.println(XOR_INPUT[i][0] + ","
					+ XOR_INPUT[i][1] + ", actual="
					+ output[0] + ",ideal="
					+ XOR_IDEAL[i][0]);
		}
	}
}