package com.anji.hyperneat.modular;

import java.io.IOException;
import java.util.List;

import javax.xml.bind.JAXBException;

import org.jgap.Chromosome;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.NDActivatorArray;
import com.anji.hyperneat.nd.NDActivatorArray.ActivatorArrayIterator;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.Util;
import com.anji.integration.Activator;
import com.anji.integration.ActivatorTranscriber;
import com.anji.integration.AnjiActivator;
import com.anji.integration.AnjiNetTranscriber;
import com.anji.integration.Transcriber;
import com.anji.integration.TranscriberException;
import com.anji.nn.AnjiNet;
import com.anji.nn.activationfunction.ActivationFunction;
import com.anji.nn.activationfunction.ActivationFunctionFactory;
import com.anji.topology.Layer;
import com.anji.topology.Topology;
import com.anji.topology.XmlUtil;
import com.anji.util.Configurable;
import com.anji.util.Properties;

/**
 * Base class for modular Multinet transcription.
 * This class builds one or more nets from a single chromosome, according to
 * the multinet description included in the topology XML.
 * This is intended to allow for modular extension of any portion of the HyperNEAT
 * algorithm, so as to support things like having the CPPN produce learning rates
 * for online learning in addition to producing weights and biases.
 * @author slusk
 *
 */
public abstract class AbstractModularHyperNeatMultiNetTranscriber implements Transcriber<NDActivatorArray>, Configurable {

	public static final String HYPERNEAT_ACTIVATION_FUNCTION_KEY = "ann.hyperneat.activation.function";
	public static final String HYPERNEAT_FEED_FORWARD_KEY = "ann.hyperneat.feedforward";
	public static final String HYPERNEAT_ENABLE_BIAS = "ann.hyperneat.enablebias";
	public static final String HYPERNEAT_INCLUDE_DELTA = "ann.hyperneat.includedelta";
	public static final String HYPERNEAT_INCLUDE_ANGLE = "ann.hyperneat.includeangle";
	public static final String HYPERNEAT_LAYER_ENCODING = "ann.hyperneat.useinputlayerencoding";
	public static final String HYPERNEAT_CYCLES_PER_STEP = "ann.hyperneat.cyclesperstep";
	public static final String HYPERNEAT_CONNECTION_RANGE = "ann.hyperneat.connection.range";
	public static final String HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD = "ann.hyperneat.connection.expression.threshold";
	public static final String HYPERNEAT_CONNECTION_WEIGHT_MIN = "ann.hyperneat.connection.weight.min";
	public static final String HYPERNEAT_CONNECTION_WEIGHT_MAX = "ann.hyperneat.connection.weight.max";
	public static final String HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE = "ann.hyperneat.topology.descriptor.file";
	public static final String MAX_DIMENSIONS = "ann.hyperneat.topology.max.dimensions";
	public static final String USE_NDIMENSIONAL_LAYERS = "ann.hyperneat.use.ndimensional.layers";
	public static final String HYPERNEAT_ENABLE_LEARNING_RATE = "ann.hyperneat.enable.learning.rate";
	public static final String HYPERNEAT_ENABLE_LEARNING_RATE_DECAY = "ann.hyperneat.enable.learning.rate.decay";

	/** Creates AnjiNet CPPNs from chromosomes. */
	protected AnjiNetTranscriber cppnTranscriber;

	/** Activation function used by the substrate networks. */
	protected ActivationFunction activationFunction;
	protected boolean layerEncodingIsInput;
	protected boolean feedForward;
	/** Activation cycles per step; only read when {@link #feedForward} is false. */
	protected int cyclesPerStep;
	protected boolean enableBias;
	protected boolean enableLearningRate;
	protected boolean enableLearningRateDecay;
	protected boolean includeDelta;
	protected boolean includeAngle;
	protected int connectionRange;
	protected float connectionWeightMin;
	protected float connectionWeightMax;
	protected float connectionExprThresh;

	/** Number of layers: input + output + any hidden layers. */
	protected int depth;
	/** Dimensionality of the layers, taken from the first layer in the topology. */
	protected int maxDimensions;

	// Topology XML classes
	protected List<Layer> allLayersList;
	protected Topology topology;
	protected int[] multiNetDimensions;
	protected int[][] layerDimensions;

	protected boolean useTestFacade = false;
	protected int genotypeRecurrentCycles;

	/**
	 * Basic constructor; delegates all setup to {@link #init(Properties)}.
	 * @param props configuration properties.
	 */
	public AbstractModularHyperNeatMultiNetTranscriber(Properties props) {
		init(props);
	}

	/**
	 * Loads all transcription settings from the given properties and parses the
	 * topology descriptor XML file.
	 * @param props configuration properties; see the {@code HYPERNEAT_*} keys on this class.
	 * @throws IllegalStateException if the topology descriptor file cannot be parsed.
	 */
	@Override
	public void init(Properties props) {
		activationFunction = ActivationFunctionFactory.getInstance().get(props.getProperty(HYPERNEAT_ACTIVATION_FUNCTION_KEY));

		feedForward = props.getBooleanProperty(HYPERNEAT_FEED_FORWARD_KEY);
		// Recurrent substrates need a fixed number of activation cycles per step.
		if (!feedForward)
			cyclesPerStep = props.getIntProperty(HYPERNEAT_CYCLES_PER_STEP);
		layerEncodingIsInput = props.getBooleanProperty(HYPERNEAT_LAYER_ENCODING);
		enableBias = props.getBooleanProperty(HYPERNEAT_ENABLE_BIAS);
		enableLearningRate = props.getBooleanProperty(HYPERNEAT_ENABLE_LEARNING_RATE);
		enableLearningRateDecay = props.getBooleanProperty(HYPERNEAT_ENABLE_LEARNING_RATE_DECAY);

		includeDelta = props.getBooleanProperty(HYPERNEAT_INCLUDE_DELTA);
		includeAngle = props.getBooleanProperty(HYPERNEAT_INCLUDE_ANGLE);

		connectionRange = props.getIntProperty(HYPERNEAT_CONNECTION_RANGE);
		connectionExprThresh = props.getFloatProperty(HYPERNEAT_CONNECTION_EXPRESSION_THRESHOLD);
		connectionWeightMin = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MIN);
		connectionWeightMax = props.getFloatProperty(HYPERNEAT_CONNECTION_WEIGHT_MAX);
		String topologyDescriptorFile = props.getProperty(HYPERNEAT_TOPOLOGY_DESCRIPTOR_FILE);

		try {
			topology = XmlUtil.getTopologyDescriptor(topologyDescriptorFile);
		} catch (JAXBException e) {
			// Fail fast: everything below depends on a parsed topology, so
			// continuing with a null topology would only defer an NPE with no
			// indication of the real cause.
			throw new IllegalStateException(
					"Unable to parse topology descriptor file: " + topologyDescriptorFile, e);
		}

		allLayersList = XmlUtil.getAllLayers(topology);
		multiNetDimensions = XmlUtil.convertByteListToArray(topology.getMultinet().getDimensions());

		// Input + output layer + # hidden layers, if present
		depth = allLayersList.size();

		// NOTE(review): assumes layer 0's dimensionality applies to all layers — confirm
		maxDimensions = allLayersList.get(0).getDimensions().size();
		layerDimensions = XmlUtil.convertLayersListToArray(allLayersList, maxDimensions);

		cppnTranscriber = (AnjiNetTranscriber) props.singletonObjectProperty(AnjiNetTranscriber.class);
		genotypeRecurrentCycles = props.getIntProperty(ActivatorTranscriber.RECURRENT_CYCLES_KEY, 1);
	}

	/* (non-Javadoc)
	 * @see com.anji.hyperneat.modular.IModularHyperNeatTranscriber#getDepth()
	 */
	public int getDepth() {
		return depth;
	}

	/* (non-Javadoc)
	 * @see com.anji.hyperneat.modular.IModularHyperNeatTranscriber#transcribe(org.jgap.Chromosome)
	 */
	@Override
	public NDActivatorArray transcribe(Chromosome c) throws TranscriberException {
		return newGridNetArray(c, null);
	}

	/* (non-Javadoc)
	 * @see com.anji.hyperneat.modular.IModularHyperNeatTranscriber#transcribe(org.jgap.Chromosome, com.anji.hyperneat.nd.NDActivatorArray)
	 */
	@Override
	public NDActivatorArray transcribe(Chromosome c, NDActivatorArray substrates) throws TranscriberException {
		return newGridNetArray(c, substrates);
	}

	/**
	 * Produces an array of Activators, according to the multinet descriptor in the
	 * topology XML. Calls {@link #createNet} to create each individual net.
	 * @param genotype The chromosome from which these activators will be created.
	 * @param phenotypes The existing set of activators, provided to allow object reuse;
	 *        may be null, in which case a fresh array is allocated.
	 * @return An array of activators.
	 * @throws TranscriberException if the CPPN cannot be transcribed from the genotype.
	 */
	public NDActivatorArray newGridNetArray(Chromosome genotype, NDActivatorArray phenotypes) throws TranscriberException {

		if (null == phenotypes)
			phenotypes = new NDActivatorArray(multiNetDimensions);
		Activator cppn = getCppnActivator(genotype);

		// Build (or rebuild, for reuse) one sub-net at every coordinate of the array.
		for (ActivatorArrayIterator it = phenotypes.iterator(); it.hasNext(); it.next()) {
			int[] subNetCoords = it.getCurrentCoordinates();
			ActivatorND net = createNet(cppn, phenotypes.get(subNetCoords), subNetCoords, genotype);
			it.set(net);
		}

		return phenotypes;
	}

	/**
	 * Uses a CPPN to produce a net; subclasses should specify exactly how this is
	 * accomplished; see ModularHyperNeatMultiNetTranscriber for basic usage.
	 * @param cppn A compositional pattern producing activator.
	 * @param activatorND An existing activator provided for reuse, or null to create from scratch.
	 * @param subNetCoords The coordinates of the net to be created within the array of nets.
	 * @param genotype The chromosome.
	 * @return An activator.
	 */
	protected abstract ActivatorND createNet(Activator cppn, ActivatorND activatorND, int[] subNetCoords, Chromosome genotype);

	/**
	 * Creates a CPPN activator from a chromosome.
	 * @param genotype the chromosome to transcribe.
	 * @return the CPPN activator; never null.
	 * @throws TranscriberException if transcription of the chromosome fails.
	 * @throws IllegalStateException if {@link #useTestFacade} is set (the test
	 *         facade implementation has been removed).
	 */
	protected Activator getCppnActivator(Chromosome genotype) throws TranscriberException {
		if (useTestFacade) {
			// The test-facade branch was commented out; silently returning null
			// here (the old behavior) would only surface as an NPE in
			// newGridNetArray, so fail fast with a clear message instead.
			throw new IllegalStateException(
					"useTestFacade is set, but no test facade implementation is available");
		}
		AnjiNet cppn = cppnTranscriber.transcribe(genotype);
		return new AnjiActivator(cppn, genotypeRecurrentCycles);
	}

	/**
	 * Sets the value of a bias element in a bias array.
	 * @param bias The array of bias values.
	 * @param coords The coordinates of the position in the bias array.
	 * @param biasVal The value to be set.
	 */
	protected void setBiasAtCoords(NDFloatArray bias, int[] coords, float biasVal) {
		bias.set(biasVal, Util.getTgtCoordsFromCoordsSet(coords));
	}

	/**
	 * Checks if the CPPN bias node should be queried; note that this will break
	 * if the dimension of the target layer is smaller than its corresponding
	 * dimension in the source layer (as in, this will return false in every such
	 * case, and some/all bias values will never be populated); this should be fixed
	 * with better logic.
	 * @param coords The set of coordinates for the weight matrix; the first
	 *        maxDimensions entries are target coordinates, the next maxDimensions
	 *        entries are source coordinates.
	 * @return true if source and target coordinates coincide in every dimension.
	 */
	protected boolean isCoordsForBias(int[] coords) {
		for (int i = 0; i < maxDimensions; i++) {
			if (coords[i] != coords[i + maxDimensions]) return false;
		}
		return true;
	}

	/**
	 * Creates the weight array based on the current configuration: one NDFloatArray
	 * per pair of adjacent layers, spanning target-layer then source-layer
	 * dimensions, each cast up to maxDimensions.
	 * @return An uninitialized weight array.
	 */
	protected NDFloatArray[] createWeightArray() {
		NDFloatArray[] weights = new NDFloatArray[depth - 1];
		for (int l = 1; l < depth; l++) { // start with the second layer, go to the end
			// Make the weights for connections between this layer and the previous;
			// for now, ignore connection ranges.
			int[] lastLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l - 1).getDimensions());
			int[] currentLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l).getDimensions());

			// Cast the weight array to maxDimensions, so giving L0: 7x7x7 and
			// L1: 10x10 produces a weight matrix of 10x10x1 x 7x7x7.
			weights[l - 1] = new NDFloatArray(
					maxDimensions
					, currentLayerDimensions
					, lastLayerDimensions
			);
		}
		return weights;
	}

	/**
	 * Creates an array with the same dimensions as the weight array, unless fixedVal
	 * is non-zero; in that case creates a single-element array containing the fixed
	 * value. Used for learning rates.
	 * @param fixedVal the fixed value, or 0.0f to get a full weight-shaped array.
	 * @return An uninitialized weight-shaped array, or a one-element array holding fixedVal.
	 */
	protected NDFloatArray[] createWeightDimmedArray(float fixedVal) {
		if (0.0f == fixedVal) {
			// Same shape as the weight matrices themselves; delegate rather than
			// duplicating the construction logic.
			return createWeightArray();
		}
		NDFloatArray[] weights = new NDFloatArray[1];
		weights[0] = new NDFloatArray(new int[] {1});
		weights[0].set(fixedVal, 0);
		return weights;
	}

	/**
	 * Creates the bias array based on the current configuration: one NDFloatArray
	 * per non-input layer, cast up to maxDimensions.
	 * @return An uninitialized bias array.
	 */
	protected NDFloatArray[] createBiasArray() {
		NDFloatArray[] bias = new NDFloatArray[depth - 1];

		for (int l = 1; l < depth; l++) { // start with the second layer, go to the end
			int[] currentLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l).getDimensions());

			// Cast it to maxDimensions size.
			bias[l - 1] = new NDFloatArray(maxDimensions, currentLayerDimensions);
		}

		return bias;
	}

	/**
	 * Creates an array with the same dimensions as the bias array, unless fixedVal
	 * is non-zero; in that case creates a single-element array containing the fixed
	 * value. Used for learning rates.
	 * @param fixedVal the fixed value, or 0.0f to get a full bias-shaped array.
	 * @return An uninitialized bias-shaped array, or a one-element array holding fixedVal.
	 */
	protected NDFloatArray[] createBiasDimmedArray(float fixedVal) {
		if (0.0f == fixedVal) {
			// Same shape as the bias arrays themselves; delegate rather than
			// duplicating the construction logic.
			return createBiasArray();
		}
		NDFloatArray[] bias = new NDFloatArray[1];
		bias[0] = new NDFloatArray(new int[] {1});
		bias[0].set(fixedVal, 0);
		return bias;
	}

	/* (non-Javadoc)
	 * @see com.anji.hyperneat.modular.IModularHyperNeatTranscriber#getPhenotypeClass()
	 */
	@Override
	public Class getPhenotypeClass() { // raw type retained for interface compatibility
		return NDActivatorArray.class;
	}

	/** @return the number of inputs the CPPN expects. */
	public abstract int getNumCppnInputs();

	/** @return the number of outputs the CPPN produces. */
	public abstract int getNumCppnOutputs();
}
