package com.anji.hyperneat.onlinereinforcement;

import java.util.List;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.GridNetND;
import com.anji.hyperneat.nd.NDFloatArray;
import com.anji.hyperneat.nd.NDFloatArray.MatrixIterator;
import com.anji.hyperneat.onlinereinforcement.ActivatorNDLR;
import com.anji.nn.activationfunction.ActivationFunction;
import com.anji.topology.Layer;
import com.anji.topology.XmlUtil;

/**
 * A {@link GridNetND} augmented with per-weight / per-bias learning rates that can
 * optionally decay toward zero over time. Learning rates and their decays are each
 * stored at an independent {@link LearningRateGranularity}: a single shared value
 * (SINGLE), one value per layer (LAYER), or one value per unit/weight (UNIT).
 *
 * NOTE(review): this class is not thread-safe; {@link #decayLearningRates()} mutates
 * the rate arrays in place.
 */
public class GridNetNDLR extends GridNetND implements ActivatorNDLR {

	/** Granularity (SINGLE / LAYER / UNIT) at which learning rates are stored. */
	protected LearningRateGranularity lrGranularity;
	/** Granularity at which learning-rate decays are stored; may differ from {@link #lrGranularity}. */
	protected LearningRateGranularity lrDecayGranularity;
	/** Weight learning rates; indexing convention depends on {@link #lrGranularity}. */
	protected NDFloatArray[] weightLearningRates;
	/** Bias learning rates; indexing convention depends on {@link #lrGranularity}. */
	protected NDFloatArray[] biasLearningRates;
	/** Decay amounts for weight learning rates; may be null when decay is unused. */
	protected NDFloatArray[] weightLearningRateDecays;
	/** Decay amounts for bias learning rates; may be null when decay is unused. */
	protected NDFloatArray[] biasLearningRateDecays;
	/** When false, {@link #decayLearningRates()} is a no-op. */
	protected boolean useDecay;

	/**
	 * Builds a learning-rate-aware grid network from a layer topology.
	 *
	 * @param allLayers layer topology
	 * @param weights connection weights, one array per inter-layer gap
	 * @param bias bias values per layer; may be null when {@code enableBias} is false
	 * @param weightLearningRates learning rates for weights (shape per {@code lrGranularity})
	 * @param biasLearningRates learning rates for biases (shape per {@code lrGranularity})
	 * @param weightLearningRateDecays decay amounts for weight rates (shape per {@code lrDecayGranularity})
	 * @param biasLearningRateDecays decay amounts for bias rates (shape per {@code lrDecayGranularity})
	 * @param activationFunction activation applied at each unit
	 * @param maxDimensions maximum dimensionality of the grid
	 * @param cyclesPerStep activation cycles per network step
	 * @param enableBias whether bias values are used
	 * @param name network name
	 * @param lrGranularity granularity of the learning-rate arrays
	 * @param lrDecayGranularity granularity of the decay arrays
	 * @param useDecay whether {@link #decayLearningRates()} has any effect
	 */
	public GridNetNDLR(List<Layer> allLayers
			, NDFloatArray[] weights
			, NDFloatArray[] bias
			, NDFloatArray[] weightLearningRates
			, NDFloatArray[] biasLearningRates
			, NDFloatArray[] weightLearningRateDecays
			, NDFloatArray[] biasLearningRateDecays
			, ActivationFunction activationFunction
			, int maxDimensions
			, int cyclesPerStep
			, boolean enableBias
			, String name
			, LearningRateGranularity lrGranularity
			, LearningRateGranularity lrDecayGranularity
			, boolean useDecay
	) {
		super(allLayers, weights, bias, activationFunction, maxDimensions, cyclesPerStep, enableBias, name);
		initLearningRateState(weightLearningRates, biasLearningRates, weightLearningRateDecays,
				biasLearningRateDecays, lrGranularity, lrDecayGranularity, useDecay);
	}

	/**
	 * Builds a learning-rate-aware grid network from pre-built layer activation arrays.
	 * Parameters are as in {@link #GridNetNDLR(List, NDFloatArray[], NDFloatArray[],
	 * NDFloatArray[], NDFloatArray[], NDFloatArray[], NDFloatArray[], ActivationFunction,
	 * int, int, boolean, String, LearningRateGranularity, LearningRateGranularity, boolean)}.
	 */
	public GridNetNDLR(NDFloatArray[] layers
			, NDFloatArray[] weights
			, NDFloatArray[] bias
			, NDFloatArray[] weightLearningRates
			, NDFloatArray[] biasLearningRates
			, NDFloatArray[] weightLearningRateDecays
			, NDFloatArray[] biasLearningRateDecays
			, ActivationFunction activationFunction
			, int maxDimensions
			, int cyclesPerStep
			, boolean enableBias
			, String name
			, LearningRateGranularity lrGranularity
			, LearningRateGranularity lrDecayGranularity
			, boolean useDecay
	) {
		super(layers, weights, bias, activationFunction, maxDimensions, cyclesPerStep, enableBias, name);
		initLearningRateState(weightLearningRates, biasLearningRates, weightLearningRateDecays,
				biasLearningRateDecays, lrGranularity, lrDecayGranularity, useDecay);
	}

	/**
	 * Constructor for converting a standard GridNetND into a GridNetNDLR.
	 * This copies the references from the base GridNetND; you may wish to clone it first,
	 * if you don't want the original GridNetND mucked with.
	 *
	 * @param base network whose layers/weights/bias/configuration are reused by reference
	 * @param weightLearningRates learning rates for weights
	 * @param biasLearningRates learning rates for biases
	 * @param weightLearningRateDecays decay amounts for weight rates
	 * @param biasLearningRateDecays decay amounts for bias rates
	 * @param lrGranularity granularity of the learning-rate arrays
	 * @param lrDecayGranularity granularity of the decay arrays
	 * @param useDecay whether {@link #decayLearningRates()} has any effect
	 */
	public GridNetNDLR(GridNetND base
			, NDFloatArray[] weightLearningRates
			, NDFloatArray[] biasLearningRates
			, NDFloatArray[] weightLearningRateDecays
			, NDFloatArray[] biasLearningRateDecays
			, LearningRateGranularity lrGranularity
			, LearningRateGranularity lrDecayGranularity
			, boolean useDecay
	) {
		super(base.getLayers(), base.getWeights(), base.getBias(), base.getActivationFunction(), base.getMaxDimensions(), base.getCyclesPerStep(), base.getEnableBias(), base.getName());
		initLearningRateState(weightLearningRates, biasLearningRates, weightLearningRateDecays,
				biasLearningRateDecays, lrGranularity, lrDecayGranularity, useDecay);
	}

	/**
	 * Shared constructor body. Assigns fields directly rather than calling the public
	 * setters: invoking overridable methods from a constructor is unsafe if this class
	 * is ever subclassed.
	 */
	private void initLearningRateState(NDFloatArray[] weightLearningRates
			, NDFloatArray[] biasLearningRates
			, NDFloatArray[] weightLearningRateDecays
			, NDFloatArray[] biasLearningRateDecays
			, LearningRateGranularity lrGranularity
			, LearningRateGranularity lrDecayGranularity
			, boolean useDecay) {
		this.weightLearningRates = weightLearningRates;
		this.biasLearningRates = biasLearningRates;
		this.weightLearningRateDecays = weightLearningRateDecays;
		this.biasLearningRateDecays = biasLearningRateDecays;
		this.lrGranularity = lrGranularity;
		this.lrDecayGranularity = lrDecayGranularity;
		this.useDecay = useDecay;
	}

	/**
	 * Resolves a single value from {@code values} according to {@code granularity}:
	 * SINGLE ignores layer and coords, LAYER ignores coords, UNIT uses both.
	 * Returns 0 for an unrecognized granularity (matches the historical fall-through).
	 */
	private static float lookupRate(NDFloatArray[] values, LearningRateGranularity granularity, int layer, int[] coords) {
		switch (granularity) {
		case SINGLE:
			return values[0].get(0);
		case LAYER:
			return values[layer].get(0);
		case UNIT:
			return values[layer].get(coords);
		default:
			return 0.0f;
		}
	}

	@Override
	public float getWeightLearningRate(int layer, int... coords) {
		return lookupRate(weightLearningRates, lrGranularity, layer, coords);
	}

	@Override
	public float getBiasLearningRate(int layer, int... coords) {
		return lookupRate(biasLearningRates, lrGranularity, layer, coords);
	}

	/**
	 * Returns the decay amount for a weight learning rate.
	 * Fixed: this previously switched on {@code lrGranularity}; decays are stored at
	 * {@code lrDecayGranularity}, which may differ, so decay values were mis-indexed
	 * whenever the two granularities disagreed.
	 */
	public float getWeightLearningRateDecay(int layer, int... coords) {
		return lookupRate(weightLearningRateDecays, lrDecayGranularity, layer, coords);
	}

	/**
	 * Returns the decay amount for a bias learning rate.
	 * Fixed: previously switched on {@code lrGranularity} instead of
	 * {@code lrDecayGranularity} (same copy-paste bug as the weight variant).
	 */
	public float getBiasLearningRateDecay(int layer, int... coords) {
		return lookupRate(biasLearningRateDecays, lrDecayGranularity, layer, coords);
	}

	@Override
	public LearningRateGranularity getLearningRateGranularity() {
		return lrGranularity;
	}

	public LearningRateGranularity getLearningRateDecayGranularity() {
		return lrDecayGranularity;
	}

	/** Deep-copies an array of NDFloatArray; returns null for null input. */
	private static NDFloatArray[] deepCopy(NDFloatArray[] src) {
		if (src == null) return null;
		NDFloatArray[] copy = new NDFloatArray[src.length];
		for (int i = 0; i < src.length; i++) copy[i] = src[i].clone();
		return copy;
	}

	/**
	 * Deep-copies this network, including learning-rate and decay state.
	 * Decay arrays may be null and are copied only when present.
	 */
	@Override
	public GridNetNDLR clone() {
		NDFloatArray[] layers = deepCopy(this.getLayers());
		NDFloatArray[] weights = deepCopy(this.getWeights());
		// Bias arrays only exist when bias is enabled.
		NDFloatArray[] bias = getEnableBias() ? deepCopy(this.getBias()) : null;
		NDFloatArray[] wgtLearningRates = deepCopy(this.getWeightLearningRates());
		NDFloatArray[] biasLearningRates = deepCopy(this.getBiasLearningRates());
		NDFloatArray[] weightLearningRateDecays = deepCopy(this.getWeightLearningRateDecays());
		NDFloatArray[] biasLearningRateDecays = deepCopy(this.getBiasLearningRateDecays());

		return new GridNetNDLR(layers, weights, bias, wgtLearningRates, biasLearningRates,
				weightLearningRateDecays, biasLearningRateDecays, this.getActivationFunction(),
				this.getMaxDimensions(), this.getCyclesPerStep(), getEnableBias(), this.getName(),
				this.getLearningRateGranularity(), this.getLearningRateDecayGranularity(), this.isUseDecay());
	}

	/**
	 * @param args unused
	 */
	public static void main(String[] args) {
		// No standalone entry point; retained for compatibility.
	}

	/**
	 * Applies one step of learning-rate decay to every weight and bias learning rate,
	 * iterating at the granularity the rates are stored at. No-op when decay is disabled.
	 * Bias rates are only decayed when bias is enabled (bias arrays may be absent otherwise).
	 */
	public void decayLearningRates() {
		if (!isUseDecay()) return;

		boolean decayBias = getEnableBias();
		switch (lrGranularity) {
		case SINGLE:
			decayWeightLearningRate(0, new int[] {0});
			if (decayBias) decayBiasLearningRate(0, new int[] {0});
			break;
		case LAYER:
			// Rates exist per inter-layer gap, hence numLayers - 1.
			for (int layer = 0; layer < getNumLayers() - 1; layer++) {
				decayWeightLearningRate(layer, new int[] {0});
				if (decayBias) decayBiasLearningRate(layer, new int[] {0});
			}
			break;
		case UNIT:
			for (int layer = 0; layer < getNumLayers() - 1; layer++) {
				// Iterate the weight/bias arrays themselves; their coordinates index
				// the per-unit learning-rate arrays at UNIT granularity.
				for (MatrixIterator it = getWeights()[layer].iterator(); it.hasNext(); it.next()) {
					decayWeightLearningRate(layer, it.getCurrentCoordinates());
				}
				if (decayBias) {
					for (MatrixIterator it = getBias()[layer].iterator(); it.hasNext(); it.next()) {
						decayBiasLearningRate(layer, it.getCurrentCoordinates());
					}
				}
			}
			break;
		}
	}

	/**
	 * Moves one weight learning rate toward zero by its decay amount, clamping at zero.
	 * The decay is looked up via {@link #getWeightLearningRateDecay(int, int...)} so the
	 * decay granularity is respected even when it differs from the rate granularity.
	 */
	protected void decayWeightLearningRate(int layer, int... coords) {
		float current = weightLearningRates[layer].get(coords);
		float decay = getWeightLearningRateDecay(layer, coords);

		if (current == 0) return; // Fully decayed; leave it.

		if (current > 0) {	// Positive case, decay negatively, clamping at zero.
			weightLearningRates[layer].set(current < decay ? 0.0f : current - decay, coords);
		} else {	// Negative case, decay positively, clamping at zero.
			// Fixed: the old condition (current > decay) is never true for a negative
			// rate with a positive decay, so negative rates overshot past zero instead
			// of clamping. The symmetric magnitude test is -current < decay.
			weightLearningRates[layer].set(-current < decay ? 0.0f : current + decay, coords);
		}
	}

	/**
	 * Moves one bias learning rate toward zero by its decay amount, clamping at zero.
	 * Mirrors {@link #decayWeightLearningRate(int, int...)}, including the negative-rate
	 * clamp fix and the granularity-aware decay lookup.
	 */
	protected void decayBiasLearningRate(int layer, int... coords) {
		float current = biasLearningRates[layer].get(coords);
		float decay = getBiasLearningRateDecay(layer, coords);

		if (current == 0) return; // Fully decayed; leave it.

		if (current > 0) {	// Positive case, decay negatively, clamping at zero.
			biasLearningRates[layer].set(current < decay ? 0.0f : current - decay, coords);
		} else {	// Negative case, decay positively, clamping at zero.
			biasLearningRates[layer].set(-current < decay ? 0.0f : current + decay, coords);
		}
	}

	public void setWeightLearningRates(NDFloatArray[] weightLearningRates) {
		this.weightLearningRates = weightLearningRates;
	}

	public NDFloatArray[] getWeightLearningRates() {
		return weightLearningRates;
	}

	public void setBiasLearningRates(NDFloatArray[] biasLearningRates) {
		this.biasLearningRates = biasLearningRates;
	}

	public NDFloatArray[] getBiasLearningRates() {
		return biasLearningRates;
	}

	public void setUseDecay(boolean useDecay) {
		this.useDecay = useDecay;
	}

	public boolean isUseDecay() {
		return useDecay;
	}

	@Override
	public NDFloatArray[] getWeightLearningRateDecays() {
		return weightLearningRateDecays;
	}

	@Override
	public NDFloatArray[] getBiasLearningRateDecays() {
		return biasLearningRateDecays;
	}

}
