package mlp;

import java.io.Serializable;

import utils.RandomGenerator;

public class Neuron implements Serializable {

	private static final long serialVersionUID = 1900298420293333675L;

	// weighted input sum of the most recent activate() call;
	// cached so getActivationDerivative() can evaluate ReLU' at the same point
	private float _activation;
	// weights of the connections to the previous layer;
	// the LAST element is the bias weight
	private float[] _synapticWeights;

	/** Constant value fed through the bias weight. */
	public static final float BIAS = -1;

	/** Steepness parameter of the sigmoid (kept for a tanh/sigmoid activation variant). */
	static final float LAMBDA = 1.5f;

	/**
	 * Creates a neuron with the given input weights.
	 *
	 * @param weights weight of each input edge; the last element is
	 *                expected to be the bias weight (array is stored
	 *                directly, no copy is taken)
	 */
	public Neuron(float[] weights)
	{
		/*
		 * each neuron knows the weights of each connection
		 * with neurons of the previous layer
		 */
		_synapticWeights = weights;
	}

	/**
	 * Creates a neuron with {@code prev_n_neurons + 1} weights (one per
	 * neuron of the previous layer plus one bias weight), each drawn
	 * randomly from [-Mlp.INIT_WEIGHT_RANGE, +Mlp.INIT_WEIGHT_RANGE].
	 *
	 * @param prev_n_neurons number of neurons in the previous layer
	 */
	public Neuron(int prev_n_neurons)
	{
		// one extra slot for the bias weight
		_synapticWeights = new float[prev_n_neurons + 1];

		// initialize every weight (including the bias weight) randomly
		for (int i = 0; i < _synapticWeights.length; ++i) {
			_synapticWeights[i] = RandomGenerator.nextFloat(Mlp.INIT_WEIGHT_RANGE);
		}
	}

	/**
	 * Activates the neuron: computes the weighted sum of the inputs plus
	 * the bias term, applies the ReLU activation function and returns the
	 * result. The pre-activation sum is cached for
	 * {@link #getActivationDerivative()}.
	 *
	 * @param inputs incoming data on each input edge; must contain exactly
	 *               {@code getSynapticWeights().length - 1} elements
	 * @return {@code max(0, weighted sum)} — ReLU of the net input
	 */
	public float activate(float[] inputs)
	{
		_activation = 0.0f;

		// dot product of inputs and their synaptic weights
		for (int i = 0; i < inputs.length; ++i) {
			_activation += inputs[i] * _synapticWeights[i];
		}
		// add the bias contribution: last weight times the constant BIAS input
		_activation += _synapticWeights[_synapticWeights.length - 1] * BIAS;

		// ReLU: identity for positive net input, zero otherwise
		return (_activation > 0) ? _activation : 0;
	}

	/**
	 * Derivative of the activation function (ReLU) evaluated at the net
	 * input cached by the most recent {@link #activate(float[])} call.
	 *
	 * @return 1 if the cached net input is positive, 0 otherwise
	 */
	public float getActivationDerivative() // dphi(_activation)
	{
		return (_activation > 0) ? 1 : 0;
	}

	/** @return the internal weight array (not a copy — callers share state) */
	public float[] getSynapticWeights() { return _synapticWeights; }

	/** @return the weight of input edge {@code i} */
	public float getSynapticWeight(int i) { return _synapticWeights[i]; }

	/** Sets the weight of input edge {@code i} to {@code v}. */
	public void setSynapticWeight(int i, float v) { _synapticWeights[i] = v; }

	/** Replaces the whole weight array (stored directly, no copy is taken). */
	public void setWeights(float[] newWeights) {
		_synapticWeights = newWeights;
	}
}
