package com.anji.hyperneat.nd;
// NOTE(review): everything below is the obsolete HyperNEATTranscriberNDOld class,
// kept commented out for historical reference only. It is incomplete and would not
// compile if re-enabled (unfinished loop in the layerDimensions setup, references to
// undefined fields such as cppnInput / cppnIdxTgtCoords / height / width, and a
// GridNet construction where GridNetND is expected). Presumably superseded by a
// current ND transcriber in this package — confirm before deleting this file.
//package com.anji.hyperneat;
//
//import java.util.List;
//
//import javax.xml.bind.JAXBException;
//
//import org.apache.log4j.Logger;
//import org.jgap.Chromosome;
//
//import com.anji.hyperneat.NDFloatArray.MatrixIterator;
//import com.anji.integration.ActivatorTranscriber;
//import com.anji.integration.AnjiActivator;
//import com.anji.integration.AnjiNetTranscriber;
//import com.anji.integration.Transcriber;
//import com.anji.integration.TranscriberException;
//import com.anji.nn.AnjiNet;
//import com.anji.nn.activationfunction.ActivationFunction;
//import com.anji.nn.activationfunction.ActivationFunctionFactory;
//import com.anji.topology.Hidden;
//import com.anji.topology.Inputs;
//import com.anji.topology.Layer;
//import com.anji.topology.Outputs;
//import com.anji.topology.TopologyDescriptor;
//import com.anji.topology.XmlUtil;
//import com.anji.util.Configurable;
//import com.anji.util.Properties;
//import com.javamex.classmexer.MemoryUtil;
//
//public class HyperNEATTranscriberNDOld implements Transcriber<GridNetND>, Configurable {
//	public static final String HYPERNEAT_ACTIVATION_FUNCTION_KEY = "ann.hyperneat.activation.function";
//	public static final String HYPERNEAT_FEED_FORWARD_KEY = "ann.hyperneat.feedforward";
//	public static final String HYPERNEAT_ENABLE_BIAS = "ann.hyperneat.enablebias";
//	public static final String HYPERNEAT_INCLUDE_DELTA = "ann.hyperneat.includedelta";
//	public static final String HYPERNEAT_INCLUDE_ANGLE = "ann.hyperneat.includeangle";
//	public static final String HYPERNEAT_LAYER_ENCODING = "ann.hyperneat.useinputlayerencoding";
//	public static final String HYPERNEAT_CYCLES_PER_STEP = "ann.hyperneat.cyclesperstep";
//	public static final String HYPERNEAT_CONNECTION_RANGE = "ann.hyperneat.connection.range";
//	public static final String HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD = "ann.hyperneat.connection.expression.threshold";
//	public static final String HYPERNEAT_CONNECTION_WEIGHT_MIN = "ann.hyperneat.connection.weight.min";
//	public static final String HYPERNEAT_CONNECTION_WEIGHT_MAX = "ann.hyperneat.connection.weight.max";
//	public static final String HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE = "ann.hyperneat.topology.descriptor.file";
//	
//	private final static Logger logger = Logger.getLogger(HyperNEATTranscriber.class);
//
//	private AnjiNetTranscriber cppnTranscriber; // creates AnjiNets from
//												// chromosomes
//	private int genotypeRecurrentCycles;
//
//	private ActivationFunction activationFunction;
//	private boolean feedForward;
//	private int cyclesPerStep;
//	private boolean enableBias;
//	private boolean includeDelta;
//	private boolean includeAngle;
//	private int connectionRange;
//	private float connectionWeightMin;
//	private float connectionWeightMax;
//	private float connectionExprThresh;
//	private boolean layerEncodingIsInput = false;
//	private int depth;
//	private int maxDimensions;
//
//	// Topology Xml classes
//	private Inputs inputLayer;
//	private Outputs outputLayer;
//	private List<Hidden> hiddenLayers;
//	private List<Layer> allLayers;
//	
//	public HyperNEATTranscriberNDOld() {
//	}
//
//	public HyperNEATTranscriberNDOld(Properties props) {
//		init(props);
//	}
//
//	/**
//	 * @see Configurable#init(Properties)
//	 */
//	public void init(Properties props) {
//		activationFunction = ActivationFunctionFactory.getInstance().get(props.getProperty(HYPERNEAT_ACTIVATION_FUNCTION_KEY));
//
//		feedForward = props.getBooleanProperty(HYPERNEAT_FEED_FORWARD_KEY);
//		if (!feedForward)
//			cyclesPerStep = props.getIntProperty(HYPERNEAT_CYCLES_PER_STEP);
//
//		enableBias = props.getBooleanProperty(HYPERNEAT_ENABLE_BIAS);
//
//		includeDelta = props.getBooleanProperty(HYPERNEAT_INCLUDE_DELTA);
//		includeAngle = props.getBooleanProperty(HYPERNEAT_INCLUDE_ANGLE);
//		
//		layerEncodingIsInput = props.getBooleanProperty(HYPERNEAT_LAYER_ENCODING, layerEncodingIsInput);
//
//		connectionRange = props.getIntProperty(HYPERNEAT_CONNECTION_RANGE);
//		connectionExprThresh = props.getFloatProperty(HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD);
//		connectionWeightMin = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MIN);
//		connectionWeightMax = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MAX);
//		String topologyDescriptorFile = props.getProperty(HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE);
//		
//		TopologyDescriptor topology = null;
//		try {
//			topology = XmlUtil.getTopologyDescriptor(topologyDescriptorFile);
//		} catch (JAXBException e) {
//			// TODO Auto-generated catch block
//			e.printStackTrace();
//		}
//		
//		inputLayer = topology.getInputs();
//		outputLayer = topology.getOutputs();
//		hiddenLayers = topology.getHidden();
//		allLayers = XmlUtil.getAllLayers(topology);
//		
//		// Input + Output layer + # hidden layers, if present
//		depth = allLayers.size();
//		
//		maxDimensions = inputLayer.getDimensions().size();
//		
//		/*
//		depth = props.getIntProperty(HYPERNEAT_DEPTH);
//		String[] heightStr = props.getProperty(HYPERNEAT_HEIGHT).split(",");
//		String[] widthStr = props.getProperty(HYPERNEAT_WIDTH).split(",");
//		height = new int[depth];
//		width = new int[depth];
//		for (int l = 0; l < depth; l++) {
//			height[l] = Integer.parseInt(heightStr[l]);
//			width[l] = Integer.parseInt(widthStr[l]);
//		}
//		*/
//
//		cppnTranscriber = (AnjiNetTranscriber) props.singletonObjectProperty(AnjiNetTranscriber.class);
//		genotypeRecurrentCycles = props.getIntProperty(ActivatorTranscriber.RECURRENT_CYCLES_KEY, 1);
//
//	}
//
//	/**
//	 * @see Transcriber#transcribe(Chromosome)
//	 */
//	public GridNetND transcribe(Chromosome genotype) throws TranscriberException {
//		return newGridNetND(genotype, null);
//	}
//
//	/**
//	 * @see Transcriber#transcribe(Chromosome, T substrate)
//	 */
//	public GridNetND transcribe(Chromosome genotype, GridNetND substrate) throws TranscriberException {
//		return newGridNetND(genotype, substrate);
//	}
//
//	/**
//	 * create new <code>GridNet</code> from <code>genotype</code>
//	 * 
//	 * @param genotype
//	 *            chromosome to transcribe
//	 * @return phenotype
//	 * @throws TranscriberException
//	 */
//	public GridNetND newGridNetND(Chromosome genotype, GridNetND phenotype) throws TranscriberException {
//		AnjiNet cppn = cppnTranscriber.transcribe(genotype);
//		AnjiActivator cppnActivator = new AnjiActivator(cppn, genotypeRecurrentCycles);
//		
//		boolean createNewPhenotype = (phenotype == null);
//
//		CppnInputDescriptor ins = new CppnInputDescriptor(maxDimensions, cppn.getInputDimension());
//		
//		NDFloatArray[] weights;
//
//		// array of Arrays, so support different size for each layer
//		NDFloatArray[] bias = createNewPhenotype ? createBiasArray() : phenotype.getBias();
//		
//		
////		float cppnInputTgtLayer, cppnTY, cppnTX, cppnInputSrcLayer, cppnSY, cppnSX;
//		float cppnInputTgtLayer, cppnInputSrcLayer;
//		int[] cppnInputTgtDim;
//		float[] cppnInputCoordDeltas;
//		
//		
//		if (feedForward) {
//			
//			/* *********************************************************
//			 * *** BEGIN create wgts
//			 * ********************************************************* 
//			 */
//			if (createNewPhenotype) {
////				weights = new float[depth - 1][][][][][];
//				weights = new NDFloatArray[depth-1];
//				for (int l = 1; l < depth; l++)	{// Start with second layer go to end
////					weights[l-1] = new float[height[l]][width[l]][1][][];	// weights for current layer pair, set to new array
//					// Make the wgts for connections between this layer and the previous; for now, ignore connection ranges.
//					int[] lastLayerDimensions = XmlUtil.convertBigIntListToArray(allLayers.get(l-1).getDimensions());
//					int[] currentLayerDimensions = XmlUtil.convertBigIntListToArray(allLayers.get(l).getDimensions());
//					// Cast the weight array to dimensions of more dimensional layer
//					// so giving L0: 7x7x7 and L1: 10x10 produce a weight matrix of 10x10x1x7x7x7 
//					//int dimCastSize = Math.max(lastLayerDimensions.length, currentLayerDimensions.length);
//					int dimCastSize = Math.max(lastLayerDimensions.length, currentLayerDimensions.length);
//					weights[l-1] = new NDFloatArray(
//							dimCastSize
//							, currentLayerDimensions
//							, lastLayerDimensions
//							
//					);
//				}
//			}
//			else {
//				weights = phenotype.getWeights();
//			}
//			/* *********************************************************
//			 * *** end create wgts
//			 * ********************************************************* 
//			 */
//			
//			// query CPPN for substrate connection weights
//			for (int currentLayer = 1; currentLayer < depth; currentLayer++) {
//				if (depth > 2 && layerEncodingIsInput) {
//					cppnInputTgtLayer = ((float) currentLayer) / (depth - 1);	// Normalizing
//					cppnInput[cppnIdxTgtLayer] = cppnInputTgtLayer;
//				}
//				
//				// for each layer ^
//				// figure out size of wgts between this layer and the previous
//				for (MatrixIterator it = weights[currentLayer].iterator(); it.hasNext(); it.next() ) {
//					// Since we created 
//					int[] coordinates = it.getCurrentCoordinates();
//					// Coords 0-n = target; coords n-m = source
//					
//					/*
//					 * 	int cppnIdxSrcCoords[] = new int[inputLayer.getDimensions().size()]; 
//						int cppnIdxTgtCoords[] = new int[inputLayer.getDimensions().size()];
//						int cppnIdxCoordDeltas[] = new int[inputLayer.getDimensions().size()];
//						int cppnIdxAngles[] = new int[inputLayer.getDimensions().size() - 1];
//						
//						int cppnInputIdx = 1; // 0 is always bias
//		
//					 */
//					
//					// First do coords for target node
//					for (int i = 0; i < cppnIdxTgtCoords.length; i++) 
//						cppnInput[cppnIdxTgtCoords[i]] = coordinates[i];
//					
//					// Then do coords for source nodes
//					for (int i = 0; i < cppnIdxSrcCoords.length; i++) 
//						cppnInput[cppnIdxSrcCoords[i]] = coordinates[i+cppnIdxTgtCoords.length];
//					
//					// Now coord for target layer
//					cppnInput[cppnIdxTgtLayer] = currentLayer;
//					
//					// Now coord for source layer
//					cppnInput[cppnIdxSrcLayer] = currentLayer - 1;
//					
//					// This is feedforward section, so skip layer delta, it's always 1
//					
//					// If include deltas, then do coordinate deltas
//					if (includeDelta) {
//						// dimensions of each layer should have been cast to a minimum number of dimensions equal to the max number of dimensions of any layer.
//						for (int i = 0; i < coordinates.length; i++) {
//							int tgtCoord = coordinates[i];
//							int srcCoord = coordinates[i+cppnIdxTgtCoords.length];
//							int delta = tgtCoord - srcCoord;
//							cppnInput[cppnIdxCoordDeltas[i]] = delta;
//						}
//					}
//					
//					// If include angles, do angles.
//					if (includeAngle) {
//						
//					}
//					
//					// TODO need a bounds check; set input lengths = input layer size, but actual layers may be smaller
//					
//					
//					// Activate cppn for wgt val
//					cppnActivator.reset();
//					float[] cppnOutput = cppnActivator.next(cppnInput);
//
//					//set the weight val as appropriate
//				
//					float weightVal;
//					// Handle case of layer encoding is input
//					if (layerEncodingIsInput)
//						weightVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxW[0]]));
//					else 
//						weightVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxW[currentLayer-1]]));
//					
//					// check if wgt val is > connection expression threshold
//					if (Math.abs(weightVal) > connectionExprThresh) {
//						// if so, normalize it in the appropriate direction +/-
//						if (weightVal > 0)
//							weightVal = (weightVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
//						else
//							weightVal = (weightVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));
//
//						it.set(weightVal);
//					} 
//					// Otherwise, connection not strong enough, set to 0
//					else {
//						it.set(0);
//					}
//					
//					//bias
//					if (enableBias && sy == ty && sx == tx) {
//						float biasVal;
//						if (layerEncodingIsInput)
//							biasVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxB[0]]));
//						else
//							biasVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxB[currentLayer-1]]));
//						if (Math.abs(biasVal) > connectionExprThresh) {
//							if (biasVal > 0)
//								biasVal = (biasVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
//							else
//								biasVal = (biasVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));
//
//							bias[currentLayer-1][ty][tx] = biasVal;
//						} else {
//							bias[currentLayer-1][ty][tx] = 0;
//						}
//					}
//				}
//				
//				
//				for (int ty = 0; ty < height[currentLayer]; ty++) {
//					if (height[currentLayer] > 1)
//						cppnTY = ((float) ty) / (height[currentLayer] - 1);	// Normalizing
//					else
//						cppnTY = 0.5f;
//					cppnInput[cppnIdxTY] = cppnTY;
//
//					for (int tx = 0; tx < width[currentLayer]; tx++) {
//						if (width[currentLayer] > 1)
//							cppnTX = ((float) tx) / (width[currentLayer] - 1); 	// Normalizing
//						else
//							cppnTX = 0.5f;
//						cppnInput[cppnIdxTX] = cppnTX;
//
//						// *** No connection limiting for now
//						// calculate dimensions of this weight target matrix
//						// (bounded by grid edges)
////						int dy = Math.min(height[currentLayer-1] - 1, ty + connectionRange) - Math.max(0, ty - connectionRange) + 1;
////						int dx = Math.min(width[currentLayer-1] - 1, tx + connectionRange) - Math.max(0, tx - connectionRange) + 1;
//						
//
//						if (createNewPhenotype)
//							weights[currentLayer - 1][ty][tx][0] = new float[dy][dx];
//						float[][] w = weights[currentLayer - 1][ty][tx][0];
//						
//
//						// for each connection to zyx
//						// w{y,x} is index into weight matrix
//						// s{y,x} is index of source neuron
//						for (int wy = 0, sy = Math.max(0, ty - connectionRange); wy < dy; wy++, sy++) {
//							if (height[currentLayer-1] > 1)
//								cppnSY = ((float) sy) / (height[currentLayer-1] - 1);	// Normalizing
//							else
//								cppnSY = 0.5f;
//								
//							cppnInput[cppnIdxSY] = cppnSY;
//
//							for (int wx = 0, sx = Math.max(0, tx - connectionRange); wx < dx; wx++, sx++) {
//								if (width[currentLayer-1] > 1)
//									cppnSX = ((float) sx) / (width[currentLayer-1] - 1);		// Normalizing
//								else
//									cppnSX = 0.5f;
//								
//								cppnInput[cppnIdxSX] = cppnSX;
//															
//								// delta
//								if (includeDelta) {
//									cppnInput[cppnIdxDY] = cppnSY - cppnTY;
//									cppnInput[cppnIdxDX] = cppnSX - cppnTX;
//								}
//								if (includeAngle) {
//									float angle = (float) Math.atan2(cppnSY - cppnTY, cppnSX - cppnTX);
//									
//									angle /= 2 * (float) Math.PI;
//									if (angle < 0)
//										angle += 1;
//									cppnInput[cppnIdxAn] = angle;
//								}
//								
//								// Activate cppn for wgt val
//								cppnActivator.reset();
//								float[] cppnOutput = cppnActivator.next(cppnInput);
//
//								//set the weight val as appropriate
//							
//								float weightVal;
//								// Handle case of layer encoding is input
//								if (layerEncodingIsInput)
//									weightVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxW[0]]));
//								else 
//									weightVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxW[currentLayer-1]]));
//								
//								// check if wgt val is > connection expression threshold
//								if (Math.abs(weightVal) > connectionExprThresh) {
//									// if so, normalize it in the appropriate direction +/-
//									if (weightVal > 0)
//										weightVal = (weightVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
//									else
//										weightVal = (weightVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));
//
//									w[wy][wx] = weightVal;
//								} 
//								// Otherwise, connection not strong enough, set to 0
//								else {
//									w[wy][wx] = 0;
//								}
//								
//								//bias
//								if (enableBias && sy == ty && sx == tx) {
//									float biasVal;
//									if (layerEncodingIsInput)
//										biasVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxB[0]]));
//									else
//										biasVal = Math.min(connectionWeightMax, Math.max(connectionWeightMin, cppnOutput[cppnIdxB[currentLayer-1]]));
//									if (Math.abs(biasVal) > connectionExprThresh) {
//										if (biasVal > 0)
//											biasVal = (biasVal - connectionExprThresh) * (connectionWeightMax / (connectionWeightMax - connectionExprThresh));
//										else
//											biasVal = (biasVal + connectionExprThresh) * (connectionWeightMin / (connectionWeightMin + connectionExprThresh));
//	
//										bias[currentLayer-1][ty][tx] = biasVal;
//									} else {
//										bias[currentLayer-1][ty][tx] = 0;
//									}
//								}
//
//							}
//						}
//					}
//				}
//				
//			}
//			
//			int[][][] connectionMaxRanges = new int[depth-1][3][2];
//			for (int l = 0; l < depth-1; l++) {
//				connectionMaxRanges[l][0][0] = -1; // no connections to previous or own layer
//				connectionMaxRanges[l][0][1] = 1;
//				connectionMaxRanges[l][1][0] = connectionRange;
//				connectionMaxRanges[l][1][1] = connectionRange;
//				connectionMaxRanges[l][2][0] = connectionRange;
//				connectionMaxRanges[l][2][1] = connectionRange;
//			}
//			int[][] layerDimensions = new int[inputLayer.getDimensions().size()][depth];
//			for (int l = 0; l < depth; l++) {
//				for ()
//				layerDimensions[0][l] = width[l];
//				layerDimensions[1][l] = height[l];
//			}
//			
//			if (createNewPhenotype) {
//				phenotype = new GridNet(connectionMaxRanges, layerDimensions, weights, bias, activationFunction, 1, "network " + genotype.getId());
//				logger.info("Substrate has " + phenotype.getConnectionCount(true) + " connections.");
//				
//				try {
//					logger.info("Substrate memory size: " + (int) Math.round(MemoryUtil.deepMemoryUsageOf(phenotype) / 1024.0) + "Kb.");
//				}
//				catch (IllegalStateException e) {};
//			}
//			else {
//				phenotype.setName("network " + genotype.getId());
//			}
//		}
//		else { // RECURRENT
//			
//			// Not supported for the moment
//		}
//
//		return phenotype;
//	}
//	
//
//	/**
//	 * @return
//	 */
//	private NDFloatArray[] createBiasArray() {
//		NDFloatArray[] bias;
//		bias = new NDFloatArray[depth - 1];
////			for (int l = 1; l < depth; l++)
////				bias[l-1] = new float[height[l]][width[l]];
//		int l = 0;
//		// create the bias based on the dimensions of each hidden layer
//		for (Hidden layer : hiddenLayers) {
//			int a[] = new int[0];
//			bias[l++] = new NDFloatArray(
//					XmlUtil.convertBigIntListToArray(layer.getDimensions())
//			);
//		}
//		
//		// And create the bias for the last layer.
//		bias[l] =  new NDFloatArray( 
//			XmlUtil.convertBigIntListToArray(outputLayer.getDimensions())
//		);
//		return bias;
//	}
//
//
//	@Override
//	public Class getPhenotypeClass() {
//		return GridNetND.class;
//	}
//
//	private class CppnInputDescriptor {
//		float[] inputs;
//		
//		int cppnIdxSrcLayer, cppnIdxTgtLayer, cppnIdxDLayer;
//		
//		// source/target coordinates:
//		// *** For now, assume that all layers have at most, same # of dimensions as input layer
//		int numDimensions;
//		int cppnIdxSrcCoords[]; 
//		int cppnIdxTgtCoords[];
//		int cppnIdxCoordDeltas[];
//		int cppnIdxAngles[];
//		
//		int cppnInputIdx;
//		
//		// determine cppn output mapping
//		int[] cppnIdxW; //weights (either a single output for all layers or one output per layer)
//		int[] cppnIdxB; //bias (either a single output for all layers or one output per layer)
//		
//		public CppnInputDescriptor(int numDimensions, int cppnInputLength) {
//			// *** Inputs
//			this.numDimensions = numDimensions;
//			this.cppnIdxSrcCoords = new int[numDimensions]; 
//			this.cppnIdxTgtCoords = new int[numDimensions];
//			this.cppnIdxCoordDeltas = new int[numDimensions];
//			this.cppnIdxAngles = new int[numDimensions-1];
//			
//			this.cppnInputIdx = 1; // 0 is always bias
//
//			for (int i = 0; i < cppnIdxTgtCoords.length; i++) {
//				cppnIdxTgtCoords[i] = cppnInputIdx++;
//			}
//			for (int i = 0; i < cppnIdxSrcCoords.length; i++) {
//				cppnIdxSrcCoords[i] = cppnInputIdx++;
//			}
//			
//			if (depth > 2 && layerEncodingIsInput) { // if depth == 2 network necessarily feed forward, and only one layer of connections can exist
//				cppnIdxTgtLayer = cppnInputIdx++;
//				if (!feedForward) {
//					cppnIdxSrcLayer = cppnInputIdx++; // source layer (could be any layer, not just previous layer)
//					if (includeDelta) // delta only when not feed forward (z delta always 1 for FF network)
//						cppnIdxDLayer = cppnInputIdx++; // z delta
//				}
//			}
//			if (includeDelta) {
//				for (int i = 0; i < cppnIdxCoordDeltas.length; i++) {
//					cppnIdxCoordDeltas[i] = cppnInputIdx++; 
//				}
//			}
//			if (includeAngle) {
//				for (int i = 0; i < cppnIdxAngles.length; i++) {
//					cppnIdxAngles[i] = cppnInputIdx++; // angle
//				}
//			}
//			
//			float[] inputs = new float[cppnInputLength];
//			inputs[0] = 1; // bias
//			
//			
//			// *** Outputs
//			
//			int cppnOutputIdx = 0;
//			if (layerEncodingIsInput) {
//				cppnIdxW = new int[1];
//				cppnIdxW[0] = cppnOutputIdx++; // weight value 
//				
//				if (enableBias) {
//					cppnIdxB = new int[1];
//					cppnIdxB[0] = cppnOutputIdx++; // bias value
//				}
//			} else { //one output per layer
//				cppnIdxW = new int[depth-1];
//				for (int w = 0; w < depth-1; w++)
//					cppnIdxW[w] = cppnOutputIdx++; // weight value 
//				
//				if (enableBias) {
//					cppnIdxB = new int[depth-1];
//					for (int w = 0; w < depth-1; w++)
//						cppnIdxB[w] = cppnOutputIdx++; // weight value 
//				}
//			}
//		}
//		
//		public float[] getCppnInputs() {
//			return inputs;
//		}
//		
//		public float getWeightOutputIndex(int currentLayer) {
//			if (layerEncodingIsInput)
//				return cppnIdxW[0];
//			return cppnIdxW[currentLayer-1];
//		}
//		
//		public float getBiasOutputIndex(int currentLayer) {
//			if (layerEncodingIsInput)
//				return cppnIdxB[0];
//			return cppnIdxB[currentLayer-1];
//		}
//	}
//}
