package com.anji.hyperneat.nd;

import java.io.IOException;
import java.util.List;
import org.apache.log4j.Logger;

import javax.xml.bind.JAXBException;

import org.jgap.Chromosome;

import com.anji.hyperneat.HyperNEATTranscriber;
import com.anji.hyperneat.nd.NDFloatArray.MatrixIterator;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR.LearningRateGranularity;
import com.anji.hyperneat.onlinereinforcement.GridNetNDLR;
import com.anji.integration.Activator;
import com.anji.integration.ActivatorTranscriber;
import com.anji.integration.AnjiActivator;
import com.anji.integration.AnjiNetTranscriber;
import com.anji.integration.Transcriber;
import com.anji.integration.TranscriberException;
import com.anji.nn.AnjiNet;
import com.anji.nn.activationfunction.ActivationFunction;
import com.anji.nn.activationfunction.ActivationFunctionFactory;
import com.anji.topology.Layer;
import com.anji.topology.Topology;
import com.anji.topology.Topology.Hidden;
import com.anji.topology.Topology.Inputs;
import com.anji.topology.Topology.Outputs;
import com.anji.topology.XmlUtil;
import com.anji.util.ActivationFacade;
import com.anji.util.Configurable;
import com.anji.util.Properties;
import com.javamex.classmexer.MemoryUtil;

public class HyperNeatTranscriberND implements Transcriber<ActivatorND>, Configurable {
	// Property keys for all HyperNEAT substrate configuration values.
	public static final String HYPERNEAT_ACTIVATION_FUNCTION_KEY = "ann.hyperneat.activation.function";
	public static final String HYPERNEAT_FEED_FORWARD_KEY = "ann.hyperneat.feedforward";
	public static final String HYPERNEAT_ENABLE_BIAS = "ann.hyperneat.enablebias";
	public static final String HYPERNEAT_INCLUDE_DELTA = "ann.hyperneat.includedelta";
	public static final String HYPERNEAT_INCLUDE_ANGLE = "ann.hyperneat.includeangle";
	public static final String HYPERNEAT_LAYER_ENCODING = "ann.hyperneat.useinputlayerencoding";
	public static final String HYPERNEAT_CYCLES_PER_STEP = "ann.hyperneat.cyclesperstep";
	public static final String HYPERNEAT_CONNECTION_RANGE = "ann.hyperneat.connection.range";
	public static final String HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD = "ann.hyperneat.connection.expression.threshold";
	public static final String HYPERNEAT_CONNECTION_WEIGHT_MIN = "ann.hyperneat.connection.weight.min";
	public static final String HYPERNEAT_CONNECTION_WEIGHT_MAX = "ann.hyperneat.connection.weight.max";
	public static final String HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE = "ann.hyperneat.topology.descriptor.file";
	public static final String MAX_DIMENSIONS = "ann.hyperneat.topology.max.dimensions";
	public static final String USE_NDIMENSIONAL_LAYERS = "ann.hyperneat.use.ndimensional.layers";
	public static final String HYPERNEAT_ENABLE_LEARNING_RATE = "ann.hyperneat.enable.learning.rate";
	public static final String HYPERNEAT_ENABLE_LEARNING_RATE_DECAY = "ann.hyperneat.enable.learning.rate.decay";

	// Fixed: the logger was registered under HyperNEATTranscriber.class, so this
	// class's messages appeared under the wrong logging category.
	private final static Logger logger = Logger.getLogger(HyperNeatTranscriberND.class);

	private AnjiNetTranscriber cppnTranscriber; // creates AnjiNets (CPPNs) from chromosomes
	private int genotypeRecurrentCycles; // activation cycles used when the CPPN is recurrent

	private ActivationFunction activationFunction; // activation function for substrate neurons
	private boolean layerEncodingIsInput; // if true, the layer index is a CPPN input (one output per quantity)
	private boolean feedForward;
	private int cyclesPerStep; // only read when !feedForward
	private boolean enableBias;
	private boolean enableLearningRate; // query per-connection learning rates (online learners)
	private boolean enableLearningRateDecay; // query per-connection learning rate decays
	private boolean includeDelta; // feed coordinate deltas to the CPPN
	private boolean includeAngle; // feed coordinate angles to the CPPN
	private int connectionRange;
	private float connectionWeightMin;
	private float connectionWeightMax;
	private float connectionExprThresh; // weights below this magnitude are not expressed

	private int depth; // total layer count: input + hidden layers + output
	private int maxDimensions; // dimensionality every layer is padded up to

	// Topology XML descriptor pieces
	private Inputs inputLayer;
	private Outputs outputLayer;
	private List<Hidden> hiddenLayers;
	private List<Layer> allLayersList;

	// When true, getCppnActivator() returns a test facade instead of a real CPPN.
	private boolean useTestFacade = false;

	/**
	 * Constructs and fully initializes the transcriber from the given properties.
	 *
	 * @param props configuration properties
	 */
	public HyperNeatTranscriberND(Properties props) {
		init(props);
	}

	/**
	 * Manual smoke test: loads properties from the file named in args[0] and
	 * transcribes a fresh Chromosome into an N-dimensional grid network.
	 *
	 * @param args args[0] is the path to the properties file
	 * @throws IOException if the properties file cannot be read
	 * @throws TranscriberException if transcription fails
	 */
	public static void main(String[] args) throws IOException, TranscriberException {
		// Guard against missing argument instead of ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("usage: HyperNeatTranscriberND <properties-file>");
			return;
		}
		Properties props = new Properties(args[0]);
		HyperNeatTranscriberND t = new HyperNeatTranscriberND(props);

		// Transcribe an empty chromosome and print the resulting network name.
		Chromosome c = new Chromosome();
		ActivatorND net = t.newGridNetND(c, null);

		System.out.println(net.getName());
	}
	
	/**
	 * Reads all HyperNEAT configuration values from {@code props} and loads the
	 * substrate topology descriptor file.
	 *
	 * @param props configuration properties
	 * @throws IllegalStateException if the topology descriptor cannot be parsed
	 */
	@Override
	public void init(Properties props) {
		activationFunction = ActivationFunctionFactory.getInstance().get(props.getProperty(HYPERNEAT_ACTIVATION_FUNCTION_KEY));
	
		feedForward = props.getBooleanProperty(HYPERNEAT_FEED_FORWARD_KEY);
		// cycles-per-step only applies to recurrent substrates
		if (!feedForward)
			cyclesPerStep = props.getIntProperty(HYPERNEAT_CYCLES_PER_STEP);
		layerEncodingIsInput = props.getBooleanProperty(HYPERNEAT_LAYER_ENCODING);
		enableBias = props.getBooleanProperty(HYPERNEAT_ENABLE_BIAS);
		enableLearningRate = props.getBooleanProperty(HYPERNEAT_ENABLE_LEARNING_RATE);
		enableLearningRateDecay = props.getBooleanProperty(HYPERNEAT_ENABLE_LEARNING_RATE_DECAY);
		
		includeDelta = props.getBooleanProperty(HYPERNEAT_INCLUDE_DELTA);
		includeAngle = props.getBooleanProperty(HYPERNEAT_INCLUDE_ANGLE);
	
		connectionRange = props.getIntProperty(HYPERNEAT_CONNECTION_RANGE);
		connectionExprThresh = props.getFloatProperty(HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD);
		connectionWeightMin = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MIN);
		connectionWeightMax = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MAX);
		String topologyDescriptorFile = props.getProperty(HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE);
		
		Topology topology;
		try {
			topology = XmlUtil.getTopologyDescriptor(topologyDescriptorFile);
		} catch (JAXBException e) {
			// Fail fast with the cause preserved: the original swallowed this
			// exception and then NPE'd on the null topology just below.
			throw new IllegalStateException("Failed to parse topology descriptor: " + topologyDescriptorFile, e);
		}
		
		inputLayer = topology.getInputs();
		outputLayer = topology.getOutputs();
		hiddenLayers = topology.getHidden();
		allLayersList = XmlUtil.getAllLayers(topology);
		
		// Input + Output layer + # hidden layers, if present
		depth = allLayersList.size();
		
		// Every layer is padded up to the input layer's dimensionality.
		maxDimensions = inputLayer.getDimensions().size();
	
		cppnTranscriber = (AnjiNetTranscriber) props.singletonObjectProperty(AnjiNetTranscriber.class);
		genotypeRecurrentCycles = props.getIntProperty(ActivatorTranscriber.RECURRENT_CYCLES_KEY, 1);
	}

	/**
	 * @return substrate depth: input and output layers plus any hidden layers
	 */
	public int getDepth() {
		return depth;
	}
	
	/**
	 * Transcribes the genotype into a brand-new substrate network.
	 *
	 * @param c CPPN genome
	 * @return newly created substrate
	 * @throws TranscriberException if the CPPN cannot be transcribed
	 */
	@Override
	public ActivatorND transcribe(Chromosome c) throws TranscriberException {
		return newGridNetND(c, null);
	}

	/**
	 * Transcribes the genotype into the supplied substrate, reusing its arrays.
	 *
	 * @param c CPPN genome
	 * @param substrate existing substrate to re-populate
	 * @return the re-populated substrate
	 * @throws TranscriberException if the CPPN cannot be transcribed
	 */
	@Override
	public ActivatorND transcribe(Chromosome c, ActivatorND substrate) throws TranscriberException {
		return newGridNetND(c, substrate);
	}
	
	/**
	 * Creates an N-Dimensional GridNet, based on configuration file.
	 * Currently only supports feedforward networks without connection ranges.
	 *
	 * @param genotype CPPN genome to transcribe
	 * @param phenotype existing substrate to re-populate, or null to build a new one
	 * @return the (new or re-populated) substrate
	 * @throws TranscriberException if the CPPN cannot be transcribed
	 */
	public ActivatorND newGridNetND(Chromosome genotype, ActivatorND phenotype) throws TranscriberException {
		// Transcribe cppn activator
		Activator cppnActivator = getCppnActivator(genotype);
		
		// Set whether we are creating a new phenotype or not
		boolean createNewPhenotype = (phenotype == null);
				
		// Create the weights and bias
		// array of Arrays, to support a different size for each layer
		NDFloatArray[] weights = createNewPhenotype ? createWeightArray() : phenotype.getWeights();
		NDFloatArray[] bias = createNewPhenotype ? createBiasArray() : phenotype.getBias();
		
		NDFloatArray[] weightLearningRates = createWeightLearningRateArray(phenotype);
		NDFloatArray[] biasLearningRates = createBiasLearningRateArray(phenotype);
		NDFloatArray[] weightLearningRateDecays = createWeightLearningRateDecayArray(phenotype);
		NDFloatArray[] biasLearningRateDecays = createBiasLearningRateDecayArray(phenotype);
		
		/*
		 * CPPN output index layout. These indexes depend only on the configuration
		 * flags (layer encoding, bias, learning rate, decay), not on the layer or
		 * the coordinates, so they are computed ONCE here; the original rebuilt
		 * them inside the innermost per-weight loop on every iteration.
		 *
		 * When the layer is a CPPN input there is one output per quantity;
		 * otherwise there is one output per inter-layer weight matrix (depth-1).
		 *
		 * Currently, if you have enableLearningRate and enableBias turned on, this
		 * assumes you want a learning rate for both the weights and the bias;
		 * similarly for learning rate decays. It would be possible to only have
		 * learning rates / decays for either weights or biases, but if you want
		 * such permutations, you'll need to roll your own.
		 */
		int outputsPerValue = layerEncodingIsInput ? 1 : depth - 1;
		int cppnOutputIdx = 0;
		
		int[] cppnIdxW = new int[outputsPerValue];	// cppn output index: weight
		int[] cppnIdxB = null;		// cppn output index: bias
		int[] cppnIdxLR = null;		// cppn output index: weight learning rate - for online learners
		int[] cppnIdxBLR = null;	// cppn output index: bias learning rate - for online learners
		int[] cppnIdxDR = null;		// cppn output index: weight learning rate decay - for online learners
		int[] cppnIdxBDR = null;	// cppn output index: bias learning rate decay - for online learners
		
		for (int w = 0; w < outputsPerValue; w++)
			cppnIdxW[w] = cppnOutputIdx++; // weight value
		
		if (enableBias) {
			cppnIdxB = new int[outputsPerValue];
			for (int w = 0; w < outputsPerValue; w++)
				cppnIdxB[w] = cppnOutputIdx++; // bias value
		}
		
		if (enableLearningRate) {
			cppnIdxLR = new int[outputsPerValue];
			for (int w = 0; w < outputsPerValue; w++)
				cppnIdxLR[w] = cppnOutputIdx++; // weight learning rate value
			
			if (enableBias) {
				cppnIdxBLR = new int[outputsPerValue];
				for (int w = 0; w < outputsPerValue; w++)
					cppnIdxBLR[w] = cppnOutputIdx++; // bias learning rate value
			}
			
			if (enableLearningRateDecay) {
				cppnIdxDR = new int[outputsPerValue];
				for (int w = 0; w < outputsPerValue; w++)
					cppnIdxDR[w] = cppnOutputIdx++; // weight learning rate decay value
				
				if (enableBias) {
					cppnIdxBDR = new int[outputsPerValue];
					for (int w = 0; w < outputsPerValue; w++)
						cppnIdxBDR[w] = cppnOutputIdx++; // bias learning rate decay value
				}
			}
		}
		
		// Query the CPPN for each weight and bias value
		for (int currentLayer = 1; currentLayer < depth; currentLayer++) {
			// Index into the per-layer CPPN output arrays (always 0 with layer encoding).
			int layerIdx = layerEncodingIsInput ? 0 : currentLayer - 1;
			
			// Create a CppnInputs to manage cppn inputs for us
			CppnInputs ins = new CppnInputs(maxDimensions, allLayersList);
			
			for (MatrixIterator it = weights[currentLayer-1].iterator(); it.hasNext(); it.next()) {
				int[] coords = it.getCurrentCoordinates();
			
				// build inputs for current weight
				ins.buildInputs(coords, currentLayer, currentLayer-1);
				
				// Activate CPPN
				cppnActivator.reset();
				float[] cppnOutput = cppnActivator.next(ins.getInputs());
				
				// Get the weight value, clamped to [min, max].
				float weightVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxW[layerIdx]]));
				
				// check if wgt val is > connection expression threshold
				if (Math.abs(weightVal) > connectionExprThresh) {
					// if so, normalize it in the appropriate direction +/-
					if (weightVal > 0)
						weightVal = (weightVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
					else
						weightVal = (weightVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));

					it.set(weightVal);
				}
				// Otherwise, connection not strong enough, set to 0
				else {
					it.set(0);
				}
				
				// Get the Learning Rate (and optional decay) for the current weight
				if (enableLearningRate) {
					weightLearningRates[currentLayer-1].set(cppnOutput[cppnIdxLR[layerIdx]], it.getCurrentCoordinates());
					
					if (enableLearningRateDecay) {
						weightLearningRateDecays[currentLayer-1].set(cppnOutput[cppnIdxDR[layerIdx]], it.getCurrentCoordinates());
					}
				}
				
				// Get Bias for the target node (queried once per node, on the
				// "diagonal" coordinates where source mirrors target).
				if (enableBias && isCoordsForBias(coords)) {
					float biasVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxB[layerIdx]]));
					if (Math.abs(biasVal) > connectionExprThresh) {
						if (biasVal > 0)
							biasVal = (biasVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
						else
							biasVal = (biasVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));

						setBiasAtCoords(bias[currentLayer-1], coords, biasVal);
					} else {
						setBiasAtCoords(bias[currentLayer-1], coords, 0);
					}
					
					// Get the Learning Rate (and optional decay) for the current bias
					if (enableLearningRate) {
						biasLearningRates[currentLayer-1].set(cppnOutput[cppnIdxBLR[layerIdx]], it.getCurrentCoordinates());
						
						if (enableLearningRateDecay) {
							biasLearningRateDecays[currentLayer-1].set(cppnOutput[cppnIdxBDR[layerIdx]], it.getCurrentCoordinates());
						}
					}
				}
			}
		}
		
		// create gridnet
		if (createNewPhenotype) {
			if (enableLearningRate) {
				phenotype = new GridNetNDLR(allLayersList
						, weights
						, bias
						, weightLearningRates
						, biasLearningRates
						, weightLearningRateDecays
						, biasLearningRateDecays
						, activationFunction
						, maxDimensions
						, 0
						, enableBias
						, "network "+genotype.getId() 
						, LearningRateGranularity.UNIT
						, LearningRateGranularity.UNIT
						, enableLearningRateDecay);
			} else {
				phenotype = new GridNetND(allLayersList
						, weights
						, bias
						, activationFunction
						, maxDimensions
						, 0
						, enableBias
						, "network "+genotype.getId());
			}
			logger.info("Substrate has " + phenotype.getConnectionCount(true) + " connections.");
			
			try {
				logger.info("Substrate memory size: " + (int) Math.round(MemoryUtil.deepMemoryUsageOf(phenotype) / 1024.0) + "Kb.");
			}
			catch (IllegalStateException ignored) {
				// MemoryUtil requires the classmexer -javaagent; skip the size report when absent.
			}
		}
		else {
			phenotype.setName("network " + genotype.getId());
		}
		
		return phenotype;
	}

	/**
	 * Per-weight learning rates: reused from an existing phenotype when one is
	 * supplied, otherwise freshly allocated with the same shapes as the weight
	 * matrices. Returns null when learning rates are disabled.
	 */
	private NDFloatArray[] createWeightLearningRateArray(ActivatorND phenotype) {
		if (!enableLearningRate) return null;
		return (phenotype == null)
				? createWeightArray()
				: ((ActivatorNDLR) phenotype).getWeightLearningRates();
	}

	/** Per-bias learning rates; null unless both learning rates and biases are enabled. */
	private NDFloatArray[] createBiasLearningRateArray(ActivatorND phenotype) {
		if (!enableLearningRate || !enableBias) return null;
		return (phenotype == null)
				? createBiasArray()
				: ((ActivatorNDLR) phenotype).getBiasLearningRates();
	}

	/** Per-weight learning rate decays; null unless learning rates and decay are both enabled. */
	private NDFloatArray[] createWeightLearningRateDecayArray(ActivatorND phenotype) {
		if (!enableLearningRate || !enableLearningRateDecay) return null;
		return (phenotype == null)
				? createWeightArray()
				: ((ActivatorNDLR) phenotype).getWeightLearningRateDecays();
	}

	/** Per-bias learning rate decays; null unless learning rates, biases and decay are all enabled. */
	private NDFloatArray[] createBiasLearningRateDecayArray(ActivatorND phenotype) {
		if (!enableLearningRate || !enableBias || !enableLearningRateDecay) return null;
		return (phenotype == null)
				? createBiasArray()
				: ((ActivatorNDLR) phenotype).getBiasLearningRateDecays();
	}
	
	/**
	 * Writes {@code biasVal} into {@code bias} at the target-node coordinates
	 * extracted from the combined weight-matrix coordinate set.
	 *
	 * @param bias bias array for one layer
	 * @param coords combined target+source coordinates from the weight iterator
	 * @param biasVal value to store
	 */
	private void setBiasAtCoords(NDFloatArray bias, int[] coords, float biasVal) {
		int[] tgtCoords = getTgtCoordsFromCoordsSet(coords);
		bias.set(biasVal, tgtCoords);
	}
	
	/**
	 * Extracts the target-node coordinates (the first {@code maxDimensions}
	 * entries) from a combined target+source coordinate set.
	 *
	 * @param coords combined coordinates; length must be at least maxDimensions
	 * @return a new array holding the first maxDimensions entries of coords
	 */
	private int[] getTgtCoordsFromCoordsSet(int[] coords) {
		int[] tgt = new int[maxDimensions];
		// System.arraycopy replaces the original element-by-element copy loop.
		System.arraycopy(coords, 0, tgt, 0, maxDimensions);
		return tgt;
	}

	/**
	 * Checks if the CPPN bias node should be queried; note that this will break
	 * if the dimension of the target layer is smaller than its corresponding 
	 * dimension in the source layer (as in, this will return false in every such
	 * case, and some/all bias values will never be populated); this should be fixed
	 * with better logic.
	 * @param coords The set of coordinates for the weight matrix.
	 * @return true when the source coordinates mirror the target coordinates
	 */
	private boolean isCoordsForBias(int[] coords) {
		// Query the bias only once per target node: on the "diagonal" entries
		// where each source coordinate equals the matching target coordinate.
		for (int i = 0; i < maxDimensions; i++) {
			if (coords[i] != coords[i+maxDimensions]) return false;
		}
		return true;
	}

	/**
	 * Allocates one NDFloatArray per inter-layer weight matrix. The matrix
	 * between adjacent layers is indexed by the target layer's dimensions
	 * followed by the source layer's dimensions, each padded to maxDimensions
	 * (so L0: 7x7x7 feeding L1: 10x10 yields a 10x10x1x7x7x7 matrix).
	 * Connection ranges are currently ignored.
	 *
	 * @return array of depth-1 freshly allocated weight matrices
	 */
	private NDFloatArray[] createWeightArray() {
		NDFloatArray[] weights = new NDFloatArray[depth - 1];
		// One weight matrix for each pair of adjacent layers, starting at the
		// second layer and walking to the end.
		for (int layer = 1; layer < depth; layer++) {
			int[] srcDims = XmlUtil.convertByteListToArray(allLayersList.get(layer - 1).getDimensions());
			int[] tgtDims = XmlUtil.convertByteListToArray(allLayersList.get(layer).getDimensions());
			weights[layer - 1] = new NDFloatArray(maxDimensions, tgtDims, srcDims);
		}
		return weights;
	}
	
	/**
	 * Builds the Activator used to query the CPPN for the given genotype. When
	 * {@code useTestFacade} is set, a stub ActivationFacade with matching
	 * input/output arity is returned instead of a real transcribed network.
	 *
	 * @param genotype CPPN genome
	 * @return activator for querying the CPPN
	 * @throws TranscriberException if the genome cannot be transcribed
	 */
	private Activator getCppnActivator(Chromosome genotype) throws TranscriberException {
		if (!useTestFacade) {
			AnjiNet cppn = cppnTranscriber.transcribe(genotype);
			return new AnjiActivator(cppn, genotypeRecurrentCycles);
		}

		// Hacktastic test facade: one weight output plus either a single extra
		// output (layer encoding) or one per inter-layer matrix.
		int outCount = 1 + (layerEncodingIsInput ? 1 : depth - 1);
		CppnInputs probe = new CppnInputs(maxDimensions, allLayersList);
		return new ActivationFacade(new int[] { probe.inputs.length }
				, new int[] { outCount }
				, connectionWeightMin
				, connectionWeightMax
		);
	}

	/**
	 * Allocates one bias array per non-input layer, each cast up to
	 * {@code maxDimensions} dimensions.
	 *
	 * Fixed: the original also converted the previous layer's dimensions on
	 * every iteration and discarded the result (copy-paste leftover from
	 * createWeightArray); that dead computation is removed.
	 *
	 * @return array of depth-1 freshly allocated bias arrays
	 */
	private NDFloatArray[] createBiasArray() {
		NDFloatArray[] bias = new NDFloatArray[depth - 1];

		// Start with second layer (the input layer has no bias) and go to the end.
		for (int l = 1; l < depth; l++) {
			int[] currentLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l).getDimensions());
			// Cast it to maxDimension size
			bias[l-1] = new NDFloatArray(maxDimensions, currentLayerDimensions);
		}
		
		return bias;
	}
	
	
	/**
	 * @return the substrate phenotype class produced by this transcriber.
	 * Raw {@code Class} return type kept to match the Transcriber interface.
	 */
	@Override
	public Class getPhenotypeClass() {
		// NOTE(review): newGridNetND produces a GridNetNDLR when learning rates
		// are enabled, but this always reports GridNetND — confirm callers only
		// rely on the base type.
		return GridNetND.class;
	}

	/**
	 * Manual smoke test for CppnInputs; results are meant to be inspected in a
	 * debugger, so the locals below are intentionally unused.
	 */
	public void testCppnInputs() {
		
		CppnInputs ins = new CppnInputs(maxDimensions, allLayersList);
	
		// Test CPPNInputs.getNormalizedCoordinates
		int[] coords = {3, 4, 2, 0, 5, 7, 6, 1};
		int layer = 2; 
		float[] normCoords = ins.getNormalizedCoordinates(coords, layer);
		
		// Test CPPNInputs.buildInputs
		// NOTE(review): sourceLayer of -1 only feeds the src-layer CPPN input for
		// non-feedforward substrates; coordinate normalization always uses layer-1.
		ins.buildInputs(coords, layer, -1);
	
		float[] inputs = ins.getInputs();
		
		// Breakpoint anchor; nothing meaningful is printed.
		System.out.println();
	}

	/**
	 * Lays out and fills the CPPN input vector for a single weight/bias query:
	 * a constant bias input, normalized target coordinates, normalized source
	 * coordinates, then — depending on the outer transcriber's configuration —
	 * layer identifiers, coordinate deltas, and inter-dimension angles.
	 * 
	 * @author slusk
	 *
	 */
	private class CppnInputs {
		// Index of each input group within the inputs array; array-valued
		// indexes hold one slot per substrate dimension.
		private int IDX_bias;
		private int[] IDX_tgtCoords;
		private int[] IDX_srcCoords;
		private int IDX_tgtLayer;
		private int IDX_srcLayer;
		private int IDX_layerDelta;
		private int[] IDX_coordDeltas;
		private int[] IDX_angles;
		
		// Number of substrate dimensions (coordinates per layer).
		private int numDimensions;

		// Per-layer dimension sizes, each padded out to numDimensions entries.
		int[][] allDimensions;
		
		// The CPPN input vector; overwritten in place by each buildInputs() call.
		float[] inputs;
		
		/**
		 * @param numDimensions number of coordinates per layer
		 * @param allLayers all substrate layers, input through output
		 */
		public CppnInputs(int numDimensions, List<Layer> allLayers) {
			this.numDimensions = numDimensions;

			int numCppnInputs = setupInputIndexes(numDimensions);
			
			inputs = new float[numCppnInputs];
			
			// Cache every layer's dimension sizes, padded to numDimensions.
			allDimensions = new int[allLayers.size()][];
			for (int i = 0; i < allDimensions.length; i++) {
				allDimensions[i] = XmlUtil.convertByteListToArray(
						allLayers.get(i)
						.getDimensions(), numDimensions);
			}
		}

		/**
		 * Assigns consecutive slots in the inputs array to each input group
		 * enabled by the outer transcriber's configuration flags.
		 * 
		 * @param numDimensions number of coordinates per layer
		 * @return total number of CPPN inputs
		 */
		private int setupInputIndexes(int numDimensions) {
			int idx = 0; 
			IDX_bias = idx++;
			IDX_tgtCoords = new int[numDimensions];
			for (int i = 0; i < numDimensions; i++) 
				IDX_tgtCoords[i] = idx++;
			
			IDX_srcCoords = new int[numDimensions];
			for (int i = 0; i < numDimensions; i++) 
				IDX_srcCoords[i] = idx++;
			
			// Layer inputs exist only when layers are CPPN inputs and the
			// substrate has at least one hidden layer.
			if (layerEncodingIsInput && allLayersList.size() > 2) {
				IDX_tgtLayer = idx++;
				if (!feedForward) {
					IDX_srcLayer = idx++;
					if (includeDelta)
						IDX_layerDelta = idx++;
				}
			}
			
			if (includeDelta) {
				IDX_coordDeltas = new int[numDimensions];
				for (int i = 0; i < numDimensions; i++) 
					IDX_coordDeltas[i] = idx++;
			}
			
			if (includeAngle) {
				IDX_angles = new int[numDimensions-1];
				for (int i = 0; i < numDimensions-1; i++) 
					IDX_angles[i] = idx++;
			}
			
			return idx;
		}
		
		/**
		 * Populates the inputs array for the connection from the node at
		 * coords[numDimensions..] in the source layer to the node at
		 * coords[0..numDimensions-1] in layer {@code layer}.
		 * 
		 * NOTE(review): coordinate normalization always treats layer-1 as the
		 * source (see getNormalizedCoordinates); {@code sourceLayer} only feeds
		 * the IDX_srcLayer input for non-feedforward substrates — confirm this
		 * is intended before adding recurrent or skip connections.
		 * 
		 * @param coords combined target+source coordinates
		 * @param layer target layer index
		 * @param sourceLayer source layer index (may be -1 in feedforward use)
		 */
		public void buildInputs(int[] coords, int layer, int sourceLayer) {
			// Set bias
			inputs[IDX_bias] = 1;
			
			float[] normCoords = getNormalizedCoordinates(coords, layer);
			
			for (int i = 0; i < numDimensions; i++) {
				inputs[IDX_tgtCoords[i]] = normCoords[i];
			}
			for (int i = 0; i < numDimensions; i++) {
				inputs[IDX_srcCoords[i]] = normCoords[i+numDimensions];
			}
			
			// Layer identifiers, normalized to [0,1] across all layers.
			if (layerEncodingIsInput && allDimensions.length > 2) {
				inputs[IDX_tgtLayer] =((float)layer) / (allDimensions.length-1);
				if (!feedForward) {
					inputs[IDX_srcLayer] = ((float)sourceLayer) / (allDimensions.length-1);
					if (includeDelta) 
						inputs[IDX_layerDelta] = inputs[IDX_tgtLayer] - inputs[IDX_srcLayer];
				}
			}

			// Per-dimension difference between target and source coordinates.
			if (includeDelta) {
				for (int i = 0; i < numDimensions; i++) {
					inputs[IDX_coordDeltas[i]] = normCoords[i] - normCoords[i+numDimensions];
				}
			}
			// Angle of the delta vector in each adjacent dimension pair,
			// mapped from radians to [0,1).
			if (includeAngle) {
				for (int d = 1; d < numDimensions; d++) {
					float angle = (float) Math.atan2(normCoords[d-1] - normCoords[d-1+numDimensions], normCoords[d] - normCoords[d+numDimensions]);
					
					angle /= 2 * (float) Math.PI;
					if (angle < 0)
						angle += 1;
					
					inputs[IDX_angles[d-1]] = angle; 
				}
			}
		}
		
		/**
		 * Normalizes the target coordinates (against layer {@code layer}'s
		 * sizes) and the source coordinates (against layer {@code layer-1}'s
		 * sizes) to [0,1]; singleton dimensions map to 0.5.
		 * 
		 * @param coords combined target+source coordinates
		 * @param layer target layer index (source is assumed to be layer-1)
		 * @return normalized coordinates, same layout as {@code coords}
		 */
		private float[] getNormalizedCoordinates(int[] coords, int layer) {
			float[] normalizedCoords = new float[coords.length];
			int idx = 0;
			// Target
			for (int d = 0; d < numDimensions; d++) {
				if (allDimensions[layer][d] > 1)
					normalizedCoords[idx++] = ((float)coords[d]) / (allDimensions[layer][d]-1);
				else 
					normalizedCoords[idx++] = 0.5f;
			}
			// Source
			for (int d = 0; d < numDimensions; d++) {
				if (allDimensions[layer-1][d] > 1)
					normalizedCoords[idx++] = ((float)coords[d+numDimensions]) / (allDimensions[layer-1][d]-1);
				else 
					normalizedCoords[idx++] = 0.5f;
			}
			return normalizedCoords;
		}
		
		/**
		 * @return the input vector last populated by buildInputs()
		 */
		public float[] getInputs() {
			return inputs;
		}
	}

}
