package NeuralAbstraction;

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataPair;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.lma.LevenbergMarquardtTraining;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

/**
 * Multi-layer perceptron backed by an Encog {@link BasicNetwork}.
 *
 * <p>The network topology is given as an {@code int[]} of layer sizes:
 * index 0 is the input layer, the last index is the output layer, and any
 * indices in between are sigmoid hidden layers. Training data is captured
 * at construction time; call {@link #trainNetwork} to fit and
 * {@link #computeResult} to evaluate.
 */
class MLPNetwork extends NeuralNetwork {
	private MLTrain train;
	private MLDataSet trainingSet;
	private BasicNetwork network;

	/**
	 * Builds (but does not train) the MLP.
	 *
	 * @param inputData  training inputs, one row per sample, width layers[0]
	 * @param targetData training targets, one row per sample, width layers[last]
	 * @param layers     neuron count per layer; must have at least 2 entries
	 *                   (input and output)
	 * @param name       display name stored on the superclass
	 * @throws Exception if {@code layers} is null or has fewer than 2 entries
	 */
	MLPNetwork(double[][] inputData, double[][] targetData, int[] layers, String name) throws Exception {
		// Validate BEFORE reading layers[0] / layers[layers.length-1]; the
		// original order could throw ArrayIndexOutOfBoundsException instead
		// of the intended Exception for 0/1-element arrays.
		if (layers == null || layers.length < 2)
			throw new Exception("there must be at least 2 layers (input and output)");

		super.name = name;
		super.inputsCount = layers[0];
		super.outputsCount = layers[layers.length - 1];
		super.layersCount = layers.length;

		trainingSet = new BasicMLDataSet(inputData, targetData);
		network = new BasicNetwork();
		// Input layer: no activation function, bias neuron enabled.
		network.addLayer(new BasicLayer(null, true, layers[0]));
		// Hidden layers: sigmoid activation, bias neuron enabled.
		for (int i = 1; i < layers.length - 1; i++) {
			network.addLayer(new BasicLayer(new ActivationSigmoid(), true, layers[i]));
		}
		// Output layer: sigmoid activation, no bias (nothing downstream to feed).
		network.addLayer(new BasicLayer(new ActivationSigmoid(), false, layers[layers.length - 1]));
		network.getStructure().finalizeStructure();
		network.reset(); // randomize initial weights
	}

	/**
	 * Trains the network until the training error drops to {@code maxError}.
	 *
	 * <p>NOTE(review): this loop has no iteration cap, so it will spin forever
	 * if the target error is unreachable for the given topology/data — confirm
	 * callers pass an achievable {@code maxError}.
	 *
	 * @param algo     training algorithm; anything other than RProp or
	 *                 LevenbergMarquardt falls back to plain backpropagation
	 * @param maxError training stops once error is at or below this value
	 * @return number of epochs counted (one greater than iterations run,
	 *         matching the original counting scheme)
	 */
	int trainNetwork(LearningAlgorithm algo, double maxError) {
		switch (algo) {
		case RProp:
			train = new ResilientPropagation(network, trainingSet);
			break;
		case LevenbergMarquardt:
			train = new LevenbergMarquardtTraining(network, trainingSet);
			break;
		default:
			// Plain backpropagation: learning rate 0.7, momentum 0.9.
			train = new Backpropagation(network, trainingSet, 0.7, 0.9);
		}
		int epoch = 1;
		do {
			train.iteration();
			epoch++;
		} while (train.getError() > maxError);
		// Finalize the training run as the Encog MLTrain contract expects;
		// the original omitted this.
		train.finishTraining();
		isTrained = true;
		return epoch;
	}

	/**
	 * Runs every input row through the trained network.
	 *
	 * @param input one row per sample, each of width {@code inputsCount}
	 * @return one output row per input row, each of width {@code outputsCount}
	 */
	public double[][] computeResult(double[][] input) {
		double[][] result = new double[input.length][super.outputsCount];
		for (int i = 0; i < input.length; i++) {
			network.compute(input[i], result[i]);
		}
		return result;
	}

	@Override
	public NetworkType getType() {
		return NetworkType.MLP;
	}
}
