package NeuralAbstraction;


import org.encog.mathutil.rbf.RBFEnum;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.neural.pattern.RadialBasisPattern;
import org.encog.neural.rbf.RBFNetwork;
import org.encog.neural.rbf.training.SVDTraining;


/**
 * Radial-basis-function network wrapper around Encog's {@link RBFNetwork}.
 *
 * <p>The hidden RBF neurons are placed on a regular grid spanning the unit
 * hypercube [0, 1] in each input dimension, with Gaussian basis functions.
 * Currently only a single output neuron is supported.
 */
public class RbfNetwork extends NeuralNetwork {
	RBFNetwork network;
	MLDataSet trainingSet;
	MLTrain train;	
	

	/**
	 * Builds an RBF network whose input layer matches the width of
	 * {@code inputData} rows and whose hidden neurons form a regular grid.
	 *
	 * @param inputData          training inputs; one row per sample
	 * @param targetData         training targets; rows must have length 1
	 * @param hiddenNeuronsCount requested total hidden-neuron budget; the
	 *                           actual count is the nearest per-dimension
	 *                           grid size raised to the input dimension
	 * @param name               network name stored on the superclass
	 * @throws Exception if more than one output column is supplied
	 */
	RbfNetwork(double[][] inputData, double[][] targetData, int hiddenNeuronsCount, String name) throws Exception{		
		super.name = name;
		super.inputsCount = inputData[0].length;
		super.outputsCount = targetData[0].length;
		super.layersCount = 3;
	
		if (targetData[0].length > 1)
			throw new Exception("only 1 output currently supported for rbf network");
		
		// The RBF sample space must have the same dimensionality as the
		// training input rows; a hard-coded value here would desynchronize
		// the network's input layer from the training set for non-2D input.
		int dimensions = inputData[0].length;

		// Neurons are laid out on a regular grid, so distribute the requested
		// budget evenly per dimension: numNeuronsPerDimension^dimensions total.
		// (Truncating root keeps the historical behavior for 2-D input.)
		int numNeuronsPerDimension = Math.max(1, (int) Math.pow(hiddenNeuronsCount, 1.0 / dimensions));

		// Set the standard RBF neuron width.
		// Literature seems to suggest this is a good default value.
		double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;

		// RBF can struggle when it comes to flats at the edge of the sample space.
		// We have added the ability to include wider neurons on the sample space boundary which greatly improves fitting to flats
		boolean includeEdgeRBFs = true;

		// General setup is the same as before
		RadialBasisPattern pattern = new RadialBasisPattern();
		pattern.setInputNeurons(dimensions);
		pattern.setOutputNeurons(1);

		// Total number of neurons required.
		int numNeurons = (int) Math.pow(numNeuronsPerDimension, dimensions);
		pattern.addHiddenLayer(numNeurons);
		network = (RBFNetwork) pattern.generate();
		// Position the multidimensional RBF neurons, with equal spacing, within
		// the provided sample space from 0 to 1.
		network.setRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian,
				volumeNeuronWidth, includeEdgeRBFs);			
		
		trainingSet = new BasicMLDataSet(inputData, targetData);
	}
	
	/**
	 * Trains the network by solving the output weights with SVD.
	 *
	 * <p>SVD is a single-step direct solve, so exactly one iteration is
	 * performed; {@code maxError} is accepted for interface compatibility but
	 * cannot drive additional iterations.
	 *
	 * @param maxError target error (informational only for SVD training)
	 * @return the number of epochs actually run (always 1)
	 */
	int trainNetwork(double maxError){	
		train = new SVDTraining(network, trainingSet);
		// SVD is a single step solve; the original loop's continuation
		// condition (epoch < 1) was always false, and returning the
		// post-incremented counter over-reported the epoch count by one.
		train.iteration();
		System.out.println("Epoch #" + 1 + " Error:" + train.getError());
		super.isTrained = true;		
		return 1;
	}
	
	/**
	 * Computes the network's output for each input row.
	 *
	 * @param input one row per sample; row width must match the input layer
	 * @return one output row per input row, each of length {@code outputsCount}
	 */
	@Override
	public double[][] computeResult(double[][] input){	
		double[][] result = new double[input.length][super.outputsCount];
		// Wrap the raw arrays so each row can be fed to the network as MLData;
		// `result` doubles as the (ignored) ideal data for the wrapper set.
		MLDataSet set = new BasicMLDataSet(input, result);
		for (int i = 0; i<input.length; i++){
			result[i] = network.compute(set.get(i).getInput()).getData().clone();			
		}			
		return result;
	}
	
	@Override
	public NetworkType getType() {
		return NetworkType.RBF;
	}

	// Sample XOR-style fixture data (two 2-D inputs per class) kept for
	// ad-hoc experimentation; not used by the class itself.
	public static double INPUT[][] = { { 0.0, 0.0 }, { 1.0, 1.0 },
		{ 0.0, 1.0 }, { 1.0, 0.0 } };

	public static double IDEAL[][] = { { 0.1 }, { 0.1 }, { 0.5 }, { 0.5 } };

}
