package com.anji.hyperneat.modular;

import java.io.IOException;

import org.jgap.Chromosome;
import org.jgap.Configuration;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.Util;
import com.anji.hyperneat.nd.NDFloatArray.MatrixIterator;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR.LearningRateGranularity;
import com.anji.hyperneat.onlinereinforcement.GridNetTD;
import com.anji.integration.Activator;
import com.anji.integration.TranscriberException;
import com.anji.persistence.Persistence;
import com.anji.topology.XmlUtil;
import com.anji.util.DummyConfiguration;
import com.anji.util.Properties;

/**
 * Transcriber that extends the modular HyperNEAT multi-net transcriber with
 * temporal-difference (TD) learning parameters: gamma, lambda and alpha, each
 * with an optional decay rate. Each parameter is either fixed network-wide
 * (non-zero {@code fixed.*} property) or transcribed per unit by the CPPN
 * (property value 0, which adds one CPPN output per such parameter).
 * Produces {@link GridNetTD} phenotypes.
 */
public class TemporalDifferenceTranscriber extends ModularHyperNeatMultiNetTranscriber {
	// Property keys. A "fixed.*" value of 0 means "not fixed": that parameter
	// is transcribed at a per-unit level by the CPPN instead of using a single
	// network-wide value.
	public static final String TD_ENABLE_GAMMA_DECAY = "ann.hyperneat.temporalDifference.enable.gamma.decay";
	public static final String TD_ENABLE_LAMBDA_DECAY = "ann.hyperneat.temporalDifference.enable.lambda.decay";
	public static final String TD_ENABLE_ALPHA_DECAY = "ann.hyperneat.temporalDifference.enable.alpha.decay";
	public static final String TD_FIXED_GAMMA = "ann.hyperneat.temporalDifference.fixed.gamma";
	public static final String TD_FIXED_LAMBDA = "ann.hyperneat.temporalDifference.fixed.lambda";
	public static final String TD_FIXED_ALPHA = "ann.hyperneat.temporalDifference.fixed.alpha";
	public static final String TD_FIXED_GAMMA_DECAY = "ann.hyperneat.temporalDifference.fixed.gamma.decay";
	public static final String TD_FIXED_LAMBDA_DECAY = "ann.hyperneat.temporalDifference.fixed.lambda.decay";
	public static final String TD_FIXED_ALPHA_DECAY = "ann.hyperneat.temporalDifference.fixed.alpha.decay";
	
	// CPPN output mappers, one per TD parameter; each is only registered as an
	// output plugin when the corresponding parameter is per-unit (fixed == 0).
	protected OutputMapper outputMapperGamma;
	protected OutputMapper outputMapperLambda;
	protected OutputMapper outputMapperAlpha;
	protected OutputMapper outputMapperGammaDecay;
	protected OutputMapper outputMapperLambdaDecay;
	protected OutputMapper outputMapperAlphaDecay;
	
	// Per-parameter decay switches, read from the TD_ENABLE_*_DECAY properties.
	protected boolean enableGammaDecay;
	protected boolean enableLambdaDecay;
	protected boolean enableAlphaDecay;
	
	// Network-wide fixed values; 0 means "transcribe per unit via the CPPN".
	protected float fixedGamma;
	protected float fixedLambda;
	protected float fixedAlpha;
	protected float fixedGammaDecay;
	protected float fixedLambdaDecay;
	protected float fixedAlphaDecay;

	/**
	 * @param props configuration properties; forwarded to the superclass
	 *              constructor.
	 */
	public TemporalDifferenceTranscriber(Properties props) {
		super(props);
	}
	
	/**
	 * Reads the TD-specific properties and sizes the CPPN output count to
	 * match the output plugins registered in {@link #registerOutputPlugins}.
	 */
	@Override
	public void init(Properties props) {
		super.init(props);
		
		// For TD, use full depth, since there is an extra gamma layer
		layerInputMapper = new FloatInputMapper(0, depth);
		
		// If the fixed value is non-zero, the fixed value will be used; if it is 0, values will be transcribed at a per-UNIT level 
		fixedGamma = props.getFloatProperty(TD_FIXED_GAMMA, 0.0f);
		fixedLambda = props.getFloatProperty(TD_FIXED_LAMBDA, 0.0f);
		fixedAlpha = props.getFloatProperty(TD_FIXED_ALPHA, 0.0f);
		fixedGammaDecay = props.getFloatProperty(TD_FIXED_GAMMA_DECAY, 0.0f);
		fixedLambdaDecay = props.getFloatProperty(TD_FIXED_LAMBDA_DECAY, 0.0f);
		fixedAlphaDecay = props.getFloatProperty(TD_FIXED_ALPHA_DECAY, 0.0f);
		
		enableGammaDecay = props.getBooleanProperty(TD_ENABLE_GAMMA_DECAY);
		enableLambdaDecay = props.getBooleanProperty(TD_ENABLE_LAMBDA_DECAY);
		enableAlphaDecay = props.getBooleanProperty(TD_ENABLE_ALPHA_DECAY);
		
		outputMapperGamma = new OutputMapper();
		outputMapperLambda = new OutputMapper();
		outputMapperAlpha = new OutputMapper();
		outputMapperGammaDecay = new OutputMapper();
		outputMapperLambdaDecay = new OutputMapper();
		outputMapperAlphaDecay = new OutputMapper();
		
		// enableLearningRate is irrelevant for TD.
		// One extra CPPN output per parameter transcribed at a per-unit level.
		// The decay terms must mirror the conditions in registerOutputPlugins():
		// these previously tested the inherited enableLearningRateDecay, which
		// could leave the CPPN output count out of sync with the registered
		// output plugins whenever the TD decay flags differed from it.
		numCppnOutputs += (0.0f == fixedGamma) ? 1 : 0;
		numCppnOutputs += (0.0f == fixedLambda) ? 1 : 0;
		numCppnOutputs += (0.0f == fixedAlpha) ? 1 : 0;
		numCppnOutputs += (enableGammaDecay && 0.0f == fixedGammaDecay) ? 1 : 0;
		numCppnOutputs += (enableLambdaDecay && 0.0f == fixedLambdaDecay) ? 1 : 0;
		numCppnOutputs += (enableAlphaDecay && 0.0f == fixedAlphaDecay) ? 1 : 0;
	}

	/**
	 * Create a new GridNetTD activator for the HN algorithm to operate on;
	 * weights and bias will not be initialized.
	 * @param genotype The chromosome for this set of GridNets
	 * @return an uninitialized {@link GridNetTD} wrapping the TD parameter arrays
	 */
	@Override
	protected ActivatorND setupActivator(Chromosome genotype) {
		NDFloatArray[] weights = createWeightArray();
		NDFloatArray[] bias = createBiasArray();
		// Decay arrays are only allocated when the corresponding decay is
		// enabled; GridNetTD receives null otherwise.
		NDFloatArray[] gammas = createGammaArray(fixedGamma);
		NDFloatArray[] gammaDecays = enableGammaDecay ? createGammaArray(fixedGammaDecay) : null;
		NDFloatArray[] lambdas = createWeightDimmedArray(fixedLambda);
		NDFloatArray[] lambdaDecays = enableLambdaDecay ? createWeightDimmedArray(fixedLambdaDecay) : null;
		NDFloatArray[] alphas = createWeightDimmedArray(fixedAlpha);
		NDFloatArray[] alphaDecays = enableAlphaDecay ? createWeightDimmedArray(fixedAlphaDecay) : null;
		
		// Granularity per parameter: 0 fixed value => per-UNIT, otherwise a
		// SINGLE network-wide value.
		GridNetTD net = new GridNetTD(
				allLayersList
				, weights
				, bias
				, gammas
				, lambdas
				, alphas
				, gammaDecays
				, lambdaDecays
				, alphaDecays
				, fixedGamma == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// gamma granularity
				, fixedLambda == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// lambda granularity
				, fixedAlpha == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// alpha granularity
				, fixedGammaDecay == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// gammaDecay granularity
				, fixedLambdaDecay == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// lambdaDecay granularity
				, fixedAlphaDecay == 0.0f ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE	// alphaDecay granularity
				, enableGammaDecay
				, enableLambdaDecay
				, enableAlphaDecay
				, activationFunction
				, maxDimensions
				, 0							// cycles per step
				, true						// enable bias
				, "network " + genotype.getId()
			);

		return net;
	}
	
	/**
	 * Transcribes one sub-network: queries the CPPN once per weight coordinate
	 * for layers 1..depth-1, then (only when gamma is per-unit) makes an extra
	 * pass at layer == depth to obtain gammas for the output layer.
	 *
	 * @param genotype used to build a fresh activator when activatorND is null
	 */
	@Override
	protected ActivatorND createNet(Activator cppn, ActivatorND activatorND, int[] subNetCoords, Chromosome genotype) {
		
		if (null == activatorND) activatorND = setupActivator(genotype);		
		
		// Setup stuff
		CppnMapper cppnMapper = getCppnMapper(cppn);
		
		// Grab the weight array for coordinate iteration
		NDFloatArray[] weights = activatorND.getWeights();
		
		// Register Input Plugins
		registerInputPlugins(cppnMapper);
		
		// Register Output Plugins
		registerOutputPlugins(cppnMapper);
		
		for (int layer = 1; layer <= depth; layer++) {	// when layer=depth, query CPPN for output layer gammas
			if (layer < depth) {
				for (MatrixIterator w = weights[layer-1].iterator(); w.hasNext(); w.next()) {
					// Pass the entire current state of the transcriber - mostly in case extending classes need easy access to anything
					populateCppnMapperInputs(cppn, activatorND, subNetCoords, cppnMapper, layer, w);
									
					// Query CPPN
					queryCppn(cppnMapper);
					
					// Pass the entire current state of the transcriber - mostly in case extending classes need easy access to anything
					processOutputsFromCppn(cppn, activatorND, subNetCoords, cppnMapper, layer, w);				
				}
			} 
			else if (0.0f == fixedGamma) {
				// Output-layer gamma pass: there is no weights[depth-1] matrix,
				// so reuse the last weight matrix's coordinates for iteration.
				for (MatrixIterator w = weights[layer-2].iterator(); w.hasNext(); w.next()) {
					// Pass the entire current state of the transcriber - mostly in case extending classes need easy access to anything
					popCppnInputsForOutputGammas(cppn, activatorND, subNetCoords, cppnMapper, layer, w);
									
					// Query CPPN
					queryCppn(cppnMapper);
					
					// Pass the entire current state of the transcriber - mostly in case extending classes need easy access to anything
					processOutputsFromCppn(cppn, activatorND, subNetCoords, cppnMapper, layer, w);				
				}
			}			
		}

		return activatorND;
	}
	
	/**
	 * Registers one output plugin per transcribed quantity: weight, bias (when
	 * enabled) and every TD parameter whose fixed value is 0. The registration
	 * conditions must stay in sync with the numCppnOutputs arithmetic in
	 * {@link #init(Properties)}.
	 */
	@Override
	protected void registerOutputPlugins(CppnMapper cppnMapper) {
		cppnMapper.registerOutputPlugin(outputMapperWeight);
		if (enableBias) 
			cppnMapper.registerOutputPlugin(outputMapperBias);
		
		if (0.0f == fixedGamma) {
			cppnMapper.registerOutputPlugin(outputMapperGamma);
		}
		if (enableGammaDecay && 0.0f == fixedGammaDecay) {
			cppnMapper.registerOutputPlugin(outputMapperGammaDecay);
		}
		
		if (0.0f == fixedLambda) {
			cppnMapper.registerOutputPlugin(outputMapperLambda);
		}
		if (enableLambdaDecay && 0.0f == fixedLambdaDecay) {
			cppnMapper.registerOutputPlugin(outputMapperLambdaDecay);
		}
		
		if (0.0f == fixedAlpha) {
			cppnMapper.registerOutputPlugin(outputMapperAlpha);
		}
		if (enableAlphaDecay && 0.0f == fixedAlphaDecay) {
			cppnMapper.registerOutputPlugin(outputMapperAlphaDecay);
		}
		
	}
	
	/**
	 * Populates the CPPN inputs for the extra output-layer gamma pass
	 * (layer == depth). The coordinates come from the last weight matrix, so
	 * source/target are populated as for the final layer of weights, while the
	 * layer input itself carries the current (depth) layer value.
	 */
	protected void popCppnInputsForOutputGammas(Activator cppn, ActivatorND activatorND, int[] subNetCoords, CppnMapper cppnMapper, int layer, MatrixIterator w) {
		
		subNetCoordsInputMapper.populateValues(subNetCoords);
		
		int[] wCoords = w.getCurrentCoordinates();
		// Treat it like the last layer of weights... cause I can't think of anything better right now.
		srcNodeCoordsInputMapper.populateValues(layer-2, Util.getSrcCoordsFromCoordsSet(wCoords));
		tgtNodeCoordsInputMapper.populateValues(layer-1, Util.getTgtCoordsFromCoordsSet(wCoords));
		// But, use the current layer as input
		layerInputMapper.populateValue(layer);
		
		if (includeDelta) {
			populateCoordinateDeltas(tgtNodeCoordsInputMapper.getNormalizedInputs(), srcNodeCoordsInputMapper.getNormalizedInputs());
		}
		
		if (includeAngle) {
			populateAngles(tgtNodeCoordsInputMapper.getNormalizedInputs(), srcNodeCoordsInputMapper.getNormalizedInputs());
		}	
	}
	
	/**
	 * Writes the queried CPPN outputs into the activator. For hidden layers
	 * (layer &lt; depth) this handles weight/bias via the superclass and then
	 * every per-unit TD parameter; for the output-layer gamma pass only the
	 * bias coordinates are used (one gamma per target unit, not per weight).
	 */
	@Override
	protected void processOutputsFromCppn(Activator cppn, ActivatorND activatorND, int[] subNetCoords, CppnMapper cppnMapper, int layer, MatrixIterator w) {
		if (layer < depth) {
			super.processOutputsFromCppn(cppn, activatorND, subNetCoords, cppnMapper, layer, w);
			
			int[] coords = w.getCurrentCoordinates();
			GridNetTD net = (GridNetTD) activatorND;
			
			if (0.0f == fixedGamma) {
				NDFloatArray gammas = net.getGammas() [layer-1];
				gammas.set(translateOutputToWeightValue(outputMapperGamma.getValue()), coords);
			}
			
			if (0.0f == fixedLambda) {
				NDFloatArray lambdas = net.getLambdas() [layer-1];
				lambdas.set(translateOutputToWeightValue(outputMapperLambda.getValue()), coords);
			}
			
			if (0.0f == fixedAlpha) {
				NDFloatArray alphas = net.getAlphas() [layer-1];
				alphas.set(translateOutputToWeightValue(outputMapperAlpha.getValue()), coords);
			}
			
			if (enableGammaDecay && fixedGammaDecay == 0.0f) {
				NDFloatArray gammaDecays = net.getGammaDecays() [layer-1];
				gammaDecays.set(translateOutputToWeightValue(outputMapperGammaDecay.getValue()), coords);
			}
			if (enableLambdaDecay && fixedLambdaDecay == 0.0f) {
				NDFloatArray lambdaDecays = net.getLambdaDecays() [layer-1];
				lambdaDecays.set(translateOutputToWeightValue(outputMapperLambdaDecay.getValue()), coords);
			}
			if (enableAlphaDecay && fixedAlphaDecay == 0.0f) {
				NDFloatArray alphaDecays = net.getAlphaDecays() [layer-1];
				alphaDecays.set(translateOutputToWeightValue(outputMapperAlphaDecay.getValue()), coords);
			}
		} else if (0.0f == fixedGamma)  {
			// Only looking at the gammas for the output layer
			int[] coords = w.getCurrentCoordinates();
			if (isCoordsForBias(coords)) {
				GridNetTD net = (GridNetTD) activatorND;
				NDFloatArray gammas = net.getGammas() [layer-1];
				gammas.set(translateOutputToWeightValue(outputMapperGamma.getValue()), coords);
				
				if (enableGammaDecay) {
					NDFloatArray gammaDecays = net.getGammaDecays() [layer-1];
					gammaDecays.set(translateOutputToWeightValue(outputMapperGammaDecay.getValue()), coords);
				}
			}
		}
	}
	
	
	/**
	 * Builds the per-layer gamma (or gamma-decay) arrays. With a non-zero
	 * fixed value a single one-element array holding that value is returned;
	 * otherwise one NDFloatArray per layer is allocated, weight-shaped for
	 * hidden layers and node-shaped for the extra output-layer entry.
	 *
	 * @param fixedVal network-wide value, or 0 for per-unit arrays
	 */
	protected NDFloatArray[] createGammaArray(float fixedVal) {
		NDFloatArray[] gammas;
		if (0.0f == fixedVal) {
			gammas = new NDFloatArray[depth];
			for (int l = 1; l <= depth; l++)	{// Start with second layer go to end; for gammas, also need for output layer
				if (l < depth) {
					// Make the wgts for connections between this layer and the previous; for now, ignore connection ranges.
					int[] lastLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l-1).getDimensions());
					int[] currentLayerDimensions = XmlUtil.convertByteListToArray(allLayersList.get(l).getDimensions());
					
					// Cast the weight array to dimensions of more dimensional layer
					// so giving L0: 7x7x7 and L1: 10x10 produce a weight matrix of 10x10x1x7x7x7 
					
					gammas[l-1] = new NDFloatArray(
							maxDimensions
							, currentLayerDimensions
							, lastLayerDimensions
							
					);
				} else {
					// Output layer: gammas are per node, shaped like the layer itself.
					gammas[l-1] = new NDFloatArray(XmlUtil.convertByteListToArray(allLayersList.get(l-1).getDimensions()));
				}			
			}
		} else {
			gammas = new NDFloatArray[1];
			gammas[0] = new NDFloatArray(new int[] {1});
			gammas[0].set(fixedVal, 0);
		}
		return gammas;
	}

	/**
	 * Manual smoke test: loads a persisted chromosome and transcribes it into
	 * a GridNetTD via the full createNet path.
	 *
	 * @throws IOException if the properties file cannot be read
	 * @throws TranscriberException on transcription failure
	 */
	public static void main(String[] args) throws IOException, TranscriberException {
		// Setup
		Properties props = new Properties("properties/modTest.properties");
		TemporalDifferenceTranscriber xcriber = new TemporalDifferenceTranscriber(props);
		
		Persistence db = (Persistence) props.newObjectProperty( Persistence.PERSISTENCE_CLASS_KEY );
		Configuration config = new DummyConfiguration();
		Chromosome chrom = db.loadChromosome("10161", config);
		
		// Exercise the individual transcription steps, then the full pipeline.
		// Intermediate results are kept in locals for debugger inspection.
		Activator cppn = xcriber.getCppnActivator(chrom);
		CppnMapper mapper = xcriber.getCppnMapper(cppn);
		xcriber.registerOutputPlugins(mapper);
		ActivatorND activatorND = xcriber.setupActivator(chrom);
		ActivatorND net = xcriber.createNet(cppn, null, new int[] {0,0}, chrom);

	}

}
