package com.anji.hyperneat.modular;

import java.io.IOException;

import org.jgap.Chromosome;
import org.jgap.Configuration;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.NDFloatArray.MatrixIterator;
import com.anji.hyperneat.onlinereinforcement.GridNetHebbianABC;
import com.anji.hyperneat.onlinereinforcement.GridNetNDLR;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR.LearningRateGranularity;
import com.anji.integration.Activator;
import com.anji.integration.TranscriberException;
import com.anji.persistence.Persistence;
import com.anji.util.DummyConfiguration;
import com.anji.util.Properties;

/**
 * Transcriber that extends the modular HyperNEAT multi-net transcriber with
 * Hebbian ABC plasticity parameters: a learning rate n and the three Hebbian
 * rule coefficients A, B, C, each with an optional decay rate.
 * <p>
 * Convention used throughout this class: a "fixed" property left at 0.0f
 * means the corresponding value is NOT fixed and is instead produced by an
 * extra CPPN output (see {@code init} and {@code registerOutputPlugins}).
 */
public class HebbianABCTranscriber extends ModularHyperNeatMultiNetTranscriber {
	// Property keys: per-coefficient switches enabling evolution/use of a decay rate.
	public static final String HABC_ENABLE_N_DECAY = "ann.hyperneat.hebbianAbc.enable.n.decay";
	public static final String HABC_ENABLE_A_DECAY = "ann.hyperneat.hebbianAbc.enable.a.decay";
	public static final String HABC_ENABLE_B_DECAY = "ann.hyperneat.hebbianAbc.enable.b.decay";
	public static final String HABC_ENABLE_C_DECAY = "ann.hyperneat.hebbianAbc.enable.c.decay";
	// Property keys: fixed values for n, A, B, C (0.0f = evolved by the CPPN instead).
	public static final String HABC_FIXED_N = "ann.hyperneat.hebbianAbc.fixed.n";
	public static final String HABC_FIXED_A = "ann.hyperneat.hebbianAbc.fixed.a";
	public static final String HABC_FIXED_B = "ann.hyperneat.hebbianAbc.fixed.b";
	public static final String HABC_FIXED_C = "ann.hyperneat.hebbianAbc.fixed.c";
	
	// Property keys: fixed decay rates (0.0f = evolved by the CPPN instead).
	public static final String HABC_FIXED_N_DECAY = "ann.hyperneat.hebbianAbc.fixed.n.decay";
	public static final String HABC_FIXED_A_DECAY = "ann.hyperneat.hebbianAbc.fixed.a.decay";
	public static final String HABC_FIXED_B_DECAY = "ann.hyperneat.hebbianAbc.fixed.b.decay";
	public static final String HABC_FIXED_C_DECAY = "ann.hyperneat.hebbianAbc.fixed.c.decay";

	// Fixed coefficient values read from the properties in init(); 0.0f acts
	// as a sentinel meaning "evolved" (see the comparisons in init()).
	protected float fixedN;
	protected float fixedA;
	protected float fixedB;
	protected float fixedC;
	protected float fixedNDecay;
	protected float fixedADecay;
	protected float fixedBDecay;
	protected float fixedCDecay;
	
	// Whether a decay rate is used at all for each coefficient.
	protected boolean enableNDecay;
	protected boolean enableADecay;
	protected boolean enableBDecay;
	protected boolean enableCDecay;
	
	// One CPPN output mapper per evolvable value; only the mappers for
	// non-fixed values are registered (see registerOutputPlugins()).
	protected OutputMapper outputMapperNLearningRate;
	protected OutputMapper outputMapperNLearningRateDecay;
	protected OutputMapper outputMapperALearningRate;
	protected OutputMapper outputMapperALearningRateDecay;
	protected OutputMapper outputMapperBLearningRate;
	protected OutputMapper outputMapperBLearningRateDecay;
	protected OutputMapper outputMapperCLearningRate;
	protected OutputMapper outputMapperCLearningRateDecay;
	
	/**
	 * Constructs the transcriber, delegating all setup to the superclass.
	 * NOTE(review): presumably the superclass constructor invokes
	 * {@link #init(Properties)} — confirm against the base class.
	 *
	 * @param props configuration properties for this run
	 */
	public HebbianABCTranscriber(Properties props) {
		super(props);
	}

	
	/**
	 * Reads the Hebbian ABC configuration and sizes the CPPN accordingly.
	 * Every coefficient (n, A, B, C) whose fixed value is left at the 0.0f
	 * sentinel is evolved, adding one CPPN output; the same applies to each
	 * decay rate, but only when that decay is enabled at all.
	 *
	 * @param props configuration properties for this run
	 */
	@Override
	public void init(Properties props) {
		super.init(props);

		// Fixed values; 0.0f means "not fixed — query the CPPN instead".
		fixedN = props.getFloatProperty(HABC_FIXED_N, 0.0f);
		fixedA = props.getFloatProperty(HABC_FIXED_A, 0.0f);
		fixedB = props.getFloatProperty(HABC_FIXED_B, 0.0f);
		fixedC = props.getFloatProperty(HABC_FIXED_C, 0.0f);
		fixedNDecay = props.getFloatProperty(HABC_FIXED_N_DECAY, 0.0f);
		fixedADecay = props.getFloatProperty(HABC_FIXED_A_DECAY, 0.0f);
		fixedBDecay = props.getFloatProperty(HABC_FIXED_B_DECAY, 0.0f);
		fixedCDecay = props.getFloatProperty(HABC_FIXED_C_DECAY, 0.0f);

		enableNDecay = props.getBooleanProperty(HABC_ENABLE_N_DECAY);
		enableADecay = props.getBooleanProperty(HABC_ENABLE_A_DECAY);
		enableBDecay = props.getBooleanProperty(HABC_ENABLE_B_DECAY);
		enableCDecay = props.getBooleanProperty(HABC_ENABLE_C_DECAY);

		outputMapperNLearningRate = new OutputMapper();
		outputMapperNLearningRateDecay = new OutputMapper();
		outputMapperALearningRate = new OutputMapper();
		outputMapperALearningRateDecay = new OutputMapper();
		outputMapperBLearningRate = new OutputMapper();
		outputMapperBLearningRateDecay = new OutputMapper();
		outputMapperCLearningRate = new OutputMapper();
		outputMapperCLearningRateDecay = new OutputMapper();

		// One extra CPPN output per evolvable n, A, B, C value.
		if (fixedN == 0.0f) {
			numCppnOutputs++;
		}
		if (fixedA == 0.0f) {
			numCppnOutputs++;
		}
		if (fixedB == 0.0f) {
			numCppnOutputs++;
		}
		if (fixedC == 0.0f) {
			numCppnOutputs++;
		}
		// Decay rates only count when their decay is enabled AND not fixed.
		if (enableNDecay && fixedNDecay == 0.0f) {
			numCppnOutputs++;
		}
		if (enableADecay && fixedADecay == 0.0f) {
			numCppnOutputs++;
		}
		if (enableBDecay && fixedBDecay == 0.0f) {
			numCppnOutputs++;
		}
		if (enableCDecay && fixedCDecay == 0.0f) {
			numCppnOutputs++;
		}
	}
	
	/**
	 * Creates the {@link GridNetHebbianABC} activator that the HyperNEAT
	 * algorithm will subsequently populate; weights and bias are allocated
	 * here but will not be initialized. Learning-rate arrays are pre-filled
	 * with the fixed values; coefficients whose decay is disabled receive a
	 * null decay array.
	 *
	 * @param genotype the chromosome for this set of GridNets
	 * @return the freshly constructed, uninitialized network
	 */
	@Override
	protected ActivatorND setupActivator(Chromosome genotype) {
		NDFloatArray[] weightArrays = createWeightArray();
		NDFloatArray[] nRates = createWeightDimmedArray(fixedN);
		NDFloatArray[] nRateDecays = enableNDecay ? createWeightDimmedArray(fixedNDecay) : null;
		NDFloatArray[] aRates = createWeightDimmedArray(fixedA);
		NDFloatArray[] aRateDecays = enableADecay ? createWeightDimmedArray(fixedADecay) : null;
		NDFloatArray[] bRates = createWeightDimmedArray(fixedB);
		NDFloatArray[] bRateDecays = enableBDecay ? createWeightDimmedArray(fixedBDecay) : null;
		NDFloatArray[] cRates = createWeightDimmedArray(fixedC);
		NDFloatArray[] cRateDecays = enableCDecay ? createWeightDimmedArray(fixedCDecay) : null;
		NDFloatArray[] biasArrays = createBiasArray();

		// Positional arguments: the order must match the GridNetHebbianABC
		// constructor exactly.
		return new GridNetHebbianABC(allLayersList, weightArrays, biasArrays,
				nRates, nRateDecays,
				aRates, aRateDecays,
				bRates, bRateDecays,
				cRates, cRateDecays,
				activationFunction, maxDimensions, 0, enableBias,
				"network " + genotype.getId(),
				LearningRateGranularity.UNIT, LearningRateGranularity.UNIT,
				enableLearningRateDecay);
	}
	
	/**
	 * Registers an output mapper with the CPPN for every coefficient and
	 * (enabled) decay rate that is not pinned to a fixed value. The set of
	 * registrations mirrors the output count computed in {@code init}.
	 *
	 * @param cppnMapper the mapper to register output plugins with
	 */
	@Override
	protected void registerOutputPlugins(CppnMapper cppnMapper) {
		super.registerOutputPlugins(cppnMapper);

		// A fixed value of exactly 0.0f marks the coefficient as CPPN-evolved.
		if (fixedN == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperNLearningRate);
		}
		if (fixedA == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperALearningRate);
		}
		if (fixedB == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperBLearningRate);
		}
		if (fixedC == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperCLearningRate);
		}

		// Decay mappers are only registered when the decay is in use at all.
		if (enableNDecay && fixedNDecay == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperNLearningRateDecay);
		}
		if (enableADecay && fixedADecay == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperALearningRateDecay);
		}
		if (enableBDecay && fixedBDecay == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperBLearningRateDecay);
		}
		if (enableCDecay && fixedCDecay == 0.0f) {
			cppnMapper.registerOutputPlugin(outputMapperCLearningRateDecay);
		}
	}
	
	/**
	 * After the superclass has consumed its outputs, writes each evolved
	 * Hebbian ABC value (n, A, B, C and their enabled decay rates) from its
	 * output mapper into the network at the connection currently addressed
	 * by the matrix iterator. Fixed values were already baked into the
	 * arrays during {@code setupActivator} and are skipped here.
	 *
	 * @param cppn        the queried CPPN
	 * @param activatorND the network being populated; must be a GridNetHebbianABC
	 * @param subNetCoords coordinates of the sub-network being transcribed
	 * @param cppnMapper  mapper holding the CPPN's current output values
	 * @param layer       current layer; learning-rate arrays use index layer-1
	 * @param w           iterator positioned at the current connection coordinates
	 */
	@Override
	protected void processOutputsFromCppn(Activator cppn, ActivatorND activatorND, int[] subNetCoords, CppnMapper cppnMapper, int layer, MatrixIterator w) {
		super.processOutputsFromCppn(cppn, activatorND, subNetCoords, cppnMapper, layer, w);

		GridNetHebbianABC net = (GridNetHebbianABC) activatorND;
		int[] coords = w.getCurrentCoordinates();
		int layerIdx = layer - 1;	// per-connection-layer arrays are offset by one

		if (fixedN == 0.0f) {
			net.getNLearningRates()[layerIdx].set(
					translateOutputToWeightValue(outputMapperNLearningRate.getValue()), coords);
		}
		if (fixedA == 0.0f) {
			net.getALearningRates()[layerIdx].set(
					translateOutputToWeightValue(outputMapperALearningRate.getValue()), coords);
		}
		if (fixedB == 0.0f) {
			net.getBLearningRates()[layerIdx].set(
					translateOutputToWeightValue(outputMapperBLearningRate.getValue()), coords);
		}
		if (fixedC == 0.0f) {
			net.getCLearningRates()[layerIdx].set(
					translateOutputToWeightValue(outputMapperCLearningRate.getValue()), coords);
		}

		if (enableNDecay && fixedNDecay == 0.0f) {
			net.getNLearningRateDecays()[layerIdx].set(
					translateOutputToWeightValue(outputMapperNLearningRateDecay.getValue()), coords);
		}
		if (enableADecay && fixedADecay == 0.0f) {
			net.getALearningRateDecays()[layerIdx].set(
					translateOutputToWeightValue(outputMapperALearningRateDecay.getValue()), coords);
		}
		if (enableBDecay && fixedBDecay == 0.0f) {
			net.getBLearningRateDecays()[layerIdx].set(
					translateOutputToWeightValue(outputMapperBLearningRateDecay.getValue()), coords);
		}
		if (enableCDecay && fixedCDecay == 0.0f) {
			net.getCLearningRateDecays()[layerIdx].set(
					translateOutputToWeightValue(outputMapperCLearningRateDecay.getValue()), coords);
		}
	}
	
	/**
	 * Manual smoke test: loads a persisted chromosome and transcribes it into
	 * a network via {@code createNet}. The chromosome id defaults to "10162"
	 * but may be supplied as the first command-line argument.
	 *
	 * @param args optional; {@code args[0]} is the chromosome id to load
	 * @throws IOException if the properties file cannot be read
	 * @throws TranscriberException if transcription fails
	 */
	public static void main(String[] args) throws IOException, TranscriberException {
		// Setup
		Properties props = new Properties("properties/modTest.properties");
		HebbianABCTranscriber xcriber = new HebbianABCTranscriber(props);

		Persistence db = (Persistence) props.newObjectProperty(Persistence.PERSISTENCE_CLASS_KEY);
		Configuration config = new DummyConfiguration();
		// Allow overriding the chromosome id from the command line; keep the
		// previous hard-coded id as the default for backward compatibility.
		String chromId = (args.length > 0) ? args[0] : "10162";
		Chromosome chrom = db.loadChromosome(chromId, config);

		// Transcribe the chromosome's CPPN into a network (smoke test only;
		// the result is not inspected here).
		Activator cppn = xcriber.getCppnActivator(chrom);
		xcriber.createNet(cppn, null, new int[] {0, 0}, chrom);
	}

}
