package com.anji.hyperneat.modular;

import java.io.IOException;

import org.jgap.Chromosome;
import org.jgap.Configuration;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.GridNetND;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.NDFloatArray.MatrixIterator;
import com.anji.hyperneat.onlinereinforcement.GridNetNDLR;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR.LearningRateGranularity;
import com.anji.integration.Activator;
import com.anji.integration.TranscriberException;
import com.anji.persistence.Persistence;
import com.anji.util.DummyConfiguration;
import com.anji.util.Properties;

/**
 * Transcriber that extends {@link ModularHyperNeatMultiNetTranscriber} with support for
 * learning rates and learning-rate decays on weights and biases. Each of the four
 * quantities (weight LR, bias LR, weight LR decay, bias LR decay) can either be fixed
 * to a single value via properties, or transcribed per-unit from additional CPPN outputs.
 * A fixed value of {@code 0.0f} means "transcribe from the CPPN".
 */
public class ModularHNLRMultiNetTranscriber extends ModularHyperNeatMultiNetTranscriber {
	/** Property key: fixed weight learning rate (0 = transcribe per-unit from the CPPN). */
	public static final String LR_FIXED_WEIGHTLR = "ann.hyperneat.learningRate.fixed.weightlr";
	/** Property key: fixed bias learning rate (0 = transcribe per-unit from the CPPN). */
	public static final String LR_FIXED_BIASLR = "ann.hyperneat.learningRate.fixed.biaslr";
	/** Property key: fixed weight learning-rate decay (0 = transcribe per-unit from the CPPN). */
	public static final String LR_FIXED_WEIGHTLR_DECAY = "ann.hyperneat.learningRate.fixed.weightlr.decay";
	/** Property key: fixed bias learning-rate decay (0 = transcribe per-unit from the CPPN). */
	public static final String LR_FIXED_BIASLR_DECAY = "ann.hyperneat.learningRate.fixed.biaslr.decay";
	
	// Fixed values read from properties; 0.0f selects per-unit transcription instead.
	protected float fixedWeightLr;
	protected float fixedBiasLr;
	protected float fixedWeightLrDecay;
	protected float fixedBiasLrDecay;
	
	// Output mappers that capture the extra CPPN outputs for each transcribed quantity.
	protected OutputMapper outputMapperWeightLearningRate;
	protected OutputMapper outputMapperWeightLearningRateDecay;
	protected OutputMapper outputMapperBiasLearningRate;
	protected OutputMapper outputMapperBiasLearningRateDecay;
	
	/**
	 * Constructs the transcriber and initializes it from the given properties.
	 * @param props configuration properties; see the {@code LR_FIXED_*} keys
	 */
	public ModularHNLRMultiNetTranscriber(Properties props) {
		super(props);
	}

	/**
	 * Reads the fixed learning-rate properties and registers one extra CPPN output
	 * for each quantity that will be transcribed rather than fixed.
	 * @param props configuration properties
	 */
	@Override
	public void init(Properties props) {
		super.init(props);
		
		// If the fixed value is non-zero, the fixed value will be used; if it is 0, values will be transcribed at a per-UNIT level 
		fixedWeightLr = props.getFloatProperty(LR_FIXED_WEIGHTLR, 0.0f);
		fixedBiasLr = props.getFloatProperty(LR_FIXED_BIASLR, 0.0f);
		fixedWeightLrDecay = props.getFloatProperty(LR_FIXED_WEIGHTLR_DECAY, 0.0f);
		fixedBiasLrDecay = props.getFloatProperty(LR_FIXED_BIASLR_DECAY, 0.0f);
		
		outputMapperWeightLearningRate = new OutputMapper();
		outputMapperWeightLearningRateDecay = new OutputMapper();
		outputMapperBiasLearningRate = new OutputMapper();
		outputMapperBiasLearningRateDecay = new OutputMapper();
		
		// Count one CPPN output per transcribed quantity. The conditions here must
		// mirror the gating in registerOutputPlugins(); previously the bias outputs
		// were counted even with enableBias == false, and the decay outputs even with
		// enableLearningRate == false, overcounting outputs relative to the plugins
		// actually registered.
		numCppnOutputs += (enableLearningRate && 0.0f == fixedWeightLr) ? 1 : 0;
		numCppnOutputs += (enableLearningRate && enableBias && 0.0f == fixedBiasLr) ? 1 : 0;
		numCppnOutputs += (enableLearningRate && enableLearningRateDecay && 0.0f == fixedWeightLrDecay) ? 1 : 0;
		numCppnOutputs += (enableLearningRate && enableBias && enableLearningRateDecay && 0.0f == fixedBiasLrDecay) ? 1 : 0;
	}
	
	/**
	 * Create a new GridNetNDLR activator for the HN algorithm to operate on; 
	 * weights, bias will be not be initialized.
	 * @param genotype The chromosome for this set of GridNets
	 * @return a {@link GridNetNDLR} with learning-rate arrays allocated (fixed-rate
	 *         arrays pre-filled with the fixed value, transcribed arrays left to be
	 *         populated by {@link #processOutputsFromCppn})
	 */
	@Override
	protected ActivatorND setupActivator(Chromosome genotype) {
		NDFloatArray[] weights = createWeightArray();
		NDFloatArray[] weightLearningRates = createWeightDimmedArray(fixedWeightLr);
		NDFloatArray[] weightLearningRateDecays = enableLearningRateDecay ? createWeightDimmedArray(fixedWeightLrDecay) : null;
		NDFloatArray[] bias = createBiasArray();
		NDFloatArray[] biasLearningRates = createBiasDimmedArray(fixedBiasLr);
		NDFloatArray[] biasLearningRateDecays = enableLearningRateDecay ? createBiasDimmedArray(fixedBiasLrDecay) : null;
		
		GridNetNDLR net = new GridNetNDLR(allLayersList
				, weights
				, bias
				, weightLearningRates
				, biasLearningRates
				, weightLearningRateDecays
				, biasLearningRateDecays
				, activationFunction
				, maxDimensions
				, 0
				, enableBias
				, "network " + genotype.getId()
				// TODO(review): granularity is decided solely from the weight settings,
				// so a configuration with fixed weight learning rates but transcribed
				// bias learning rates (or vice versa) gets the wrong granularity.
				, 0.0f == fixedWeightLr ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE
				, 0.0f == fixedWeightLrDecay ? LearningRateGranularity.UNIT : LearningRateGranularity.SINGLE
				, enableLearningRateDecay);
		
		return net;
	}
	
	/**
	 * Registers an output plugin for each learning-rate quantity that is transcribed
	 * (fixed value of 0) and enabled by the relevant flags.
	 * @param cppnMapper mapper to register the output plugins with
	 */
	@Override
	protected void registerOutputPlugins(CppnMapper cppnMapper) {
		super.registerOutputPlugins(cppnMapper);
		if (enableLearningRate) {
			if (0.0f == fixedWeightLr)
				cppnMapper.registerOutputPlugin(outputMapperWeightLearningRate);
			if (enableBias && 0.0f == fixedBiasLr)
				cppnMapper.registerOutputPlugin(outputMapperBiasLearningRate);
			if (enableLearningRateDecay) {
				if (0.0f == fixedWeightLrDecay)
					cppnMapper.registerOutputPlugin(outputMapperWeightLearningRateDecay);
				if (enableBias && 0.0f == fixedBiasLrDecay)
					cppnMapper.registerOutputPlugin(outputMapperBiasLearningRateDecay);
			}
		}
	}
	
	/**
	 * After the base class has stored the weight/bias outputs, stores the transcribed
	 * learning-rate (and decay) values at the current iterator coordinates. Bias values
	 * are only written at bias coordinates; quantities with a non-zero fixed value were
	 * pre-filled in {@link #setupActivator} and are skipped here.
	 * @param cppn        the CPPN that was just queried
	 * @param activatorND target network (must be a {@link GridNetNDLR})
	 * @param subNetCoords coordinates of the sub-network being transcribed
	 * @param cppnMapper  mapper holding the CPPN output plugins
	 * @param layer       index of the target layer (1-based; arrays use layer-1)
	 * @param w           iterator positioned at the current weight coordinates
	 */
	@Override
	protected void processOutputsFromCppn(Activator cppn, ActivatorND activatorND, int[] subNetCoords, CppnMapper cppnMapper, int layer, MatrixIterator w) {
		super.processOutputsFromCppn(cppn, activatorND, subNetCoords, cppnMapper, layer, w);
		
		if (enableLearningRate) {
			int[] coords = w.getCurrentCoordinates();
			GridNetNDLR net = (GridNetNDLR) activatorND;
			
			if (0.0f == fixedWeightLr) {
				NDFloatArray weightLearningRates = net.getWeightLearningRates() [layer-1];
				weightLearningRates.set(translateOutputToWeightValue(outputMapperWeightLearningRate.getValue()), coords);
			}
			
			if (isCoordsForBias(coords) && enableBias && 0.0f == fixedBiasLr) {
				NDFloatArray biasLearningRates = net.getBiasLearningRates() [layer-1];
				biasLearningRates.set(translateOutputToWeightValue(outputMapperBiasLearningRate.getValue()), coords);
			}
			if (enableLearningRateDecay) {
				if (0.0f == fixedWeightLrDecay) {
					NDFloatArray weightLearningRateDecays = net.getWeightLearningRateDecays() [layer-1];
					weightLearningRateDecays.set(translateOutputToWeightValue(outputMapperWeightLearningRateDecay.getValue()), coords);
				}
				
				if (isCoordsForBias(coords) && enableBias && 0.0f == fixedBiasLrDecay) {
					NDFloatArray biasLearningRateDecays = net.getBiasLearningRateDecays() [layer-1];
					biasLearningRateDecays.set(translateOutputToWeightValue(outputMapperBiasLearningRateDecay.getValue()), coords);
				}
			}
		}
	}
	
	/**
	 * Smoke test: loads a chromosome from persistence and transcribes a single network.
	 * @param args optional: [0] properties file path (default "properties/modTest.properties"),
	 *             [1] chromosome id to load (default "10160")
	 * @throws IOException if the properties file cannot be read
	 * @throws TranscriberException if transcription fails
	 */
	public static void main(String[] args) throws IOException, TranscriberException {
		// Previously hard-coded; now overridable from the command line while keeping
		// the original values as defaults.
		String propsFile = args.length > 0 ? args[0] : "properties/modTest.properties";
		String chromosomeId = args.length > 1 ? args[1] : "10160";
		
		// Setup
		Properties props = new Properties(propsFile);
		ModularHNLRMultiNetTranscriber xcriber = new ModularHNLRMultiNetTranscriber(props);
		
		Persistence db = (Persistence) props.newObjectProperty( Persistence.PERSISTENCE_CLASS_KEY );
		Configuration config = new DummyConfiguration();
		Chromosome chrom = db.loadChromosome(chromosomeId, config);
		
		// Transcribe one network from the loaded chromosome.
		Activator cppn = xcriber.getCppnActivator(chrom);
		ActivatorND net = xcriber.createNet(cppn, null, new int[] {0,0}, chrom);
	}
}

