package com.anji.hyperneat.onlinereinforcement;

import com.anji.hyperneat.nd.ActivatorND;
import com.anji.hyperneat.nd.NDFloatArray;

/**
 * Extends {@link ActivatorND} to provide methods for retrieving stored learning
 * rates (and their decay values) for other learning/reinforcement methods.
 */
public interface ActivatorNDLR extends ActivatorND {
	/**
	 * Describes the granularity of the learning rates available: either a
	 * SINGLE rate for the entire activator, a separate rate for each LAYER, or
	 * a separate rate for each net UNIT, i.e., each weight and bias.
	 *
	 * @author Shaun Lusk
	 */
	enum LearningRateGranularity {
		/** One learning rate for the entire activator. */
		SINGLE,
		/** One learning rate per layer. */
		LAYER,
		/** One learning rate per weight and bias. */
		UNIT
	}

	/**
	 * Retrieves the learning rate for the weight at the given layer and
	 * coordinates. If the granularity is {@code SINGLE}, both parameters are
	 * ignored and {@code coords} may be empty; if {@code LAYER}, only
	 * {@code layer} is used; if {@code UNIT}, both the layer and the full set
	 * of coordinates are required.
	 *
	 * @param layer The index of the layer containing the weight.
	 * @param coords The coordinates of the weight within the layer.
	 * @return The learning rate.
	 */
	float getWeightLearningRate(int layer, int... coords);

	/**
	 * Retrieves the learning rate for the bias at the given layer and
	 * coordinates. If the granularity is {@code SINGLE}, both parameters are
	 * ignored and {@code coords} may be empty; if {@code LAYER}, only
	 * {@code layer} is used; if {@code UNIT}, both the layer and the full set
	 * of coordinates are required.
	 *
	 * @param layer The index of the layer containing the bias.
	 * @param coords The coordinates of the bias within the layer.
	 * @return The learning rate.
	 */
	float getBiasLearningRate(int layer, int... coords);

	/**
	 * Gets the learning rate granularity used by this activator; see
	 * {@link LearningRateGranularity} for the meaning of each value.
	 *
	 * @return The learning rate granularity.
	 */
	LearningRateGranularity getLearningRateGranularity();

	/**
	 * Applies the stored decay values to all learning rates held by this
	 * activator.
	 */
	void decayLearningRates();

	/**
	 * @return The per-layer weight learning rates, one {@link NDFloatArray}
	 *         per layer.
	 */
	NDFloatArray[] getWeightLearningRates();

	/**
	 * @return The per-layer bias learning rates, one {@link NDFloatArray}
	 *         per layer.
	 */
	NDFloatArray[] getBiasLearningRates();

	/**
	 * @return The per-layer weight learning rate decays, one
	 *         {@link NDFloatArray} per layer.
	 */
	NDFloatArray[] getWeightLearningRateDecays();

	/**
	 * @return The per-layer bias learning rate decays, one
	 *         {@link NDFloatArray} per layer.
	 */
	NDFloatArray[] getBiasLearningRateDecays();

	/**
	 * Retrieves the learning rate decay for the weight at the given layer and
	 * coordinates; parameter usage follows the configured
	 * {@link LearningRateGranularity}, as for
	 * {@link #getWeightLearningRate(int, int...)}.
	 *
	 * @param layer The index of the layer containing the weight.
	 * @param coords The coordinates of the weight within the layer.
	 * @return The learning rate decay.
	 */
	float getWeightLearningRateDecay(int layer, int... coords);

	/**
	 * Retrieves the learning rate decay for the bias at the given layer and
	 * coordinates; parameter usage follows the configured
	 * {@link LearningRateGranularity}, as for
	 * {@link #getBiasLearningRate(int, int...)}.
	 *
	 * @param layer The index of the layer containing the bias.
	 * @param coords The coordinates of the bias within the layer.
	 * @return The learning rate decay.
	 */
	float getBiasLearningRateDecay(int layer, int... coords);

	/**
	 * Support for safe copying to allow for comparison with, or resetting to,
	 * the original net. Covariantly narrows {@link Object#clone()} so callers
	 * receive an {@code ActivatorNDLR} without casting.
	 *
	 * @return A copy of this activator.
	 */
	ActivatorNDLR clone();
}
