
package mlp;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class Layer implements Serializable {

	private static final long serialVersionUID = 491608126284560467L;
	
	// Number of neurons in this layer / in the previous layer.
	// Kept in sync with _neurons.size() by the add/remove methods.
	private int _n_neurons, _prev_n_neurons;
	private ArrayList<Neuron> _neurons;
	// Cached activation of each neuron, filled by evaluate().
	// Must always have length == _n_neurons (see addNeuron/removeNeuron).
	private float _outputs[];

	/**
	 * Constructor- creates a new layer in a neural network
	 * @param weights list where i'th element contains input weights for i'th neuron
	 */
	public Layer(List<float[]> weights)
	{
		// number of inputs of each neuron == neuron count of previous layer
		_prev_n_neurons = weights.get(0).length;
		_n_neurons = weights.size();

		// allocate everything
		_neurons = new ArrayList<Neuron>();
		_outputs = new float[_n_neurons];

		for (int i = 0; i < _n_neurons; ++i) {
			_neurons.add(new Neuron(weights.get(i)));
		}
	}

	// main constructor
	/**
	 * Constructor- creates a new layer in a neural network.
	 * If layer is an input layer, all input weights of its
	 * neurons are set to be 1- avoid changing input values
	 * @param prev_n_neurons number of neurons in previous layer
	 * @param n_neurons number of neurons in the new layer
	 */
	public Layer(int prev_n_neurons, int n_neurons)
	{
		// all the layers/neurons must use the same random number generator
		_n_neurons = n_neurons;
		_prev_n_neurons = prev_n_neurons;

		// allocate everything
		_neurons = new ArrayList<Neuron>();
		_outputs = new float[_n_neurons];

		for (int i = 0; i < _n_neurons; ++i) {
			_neurons.add(new Neuron(_prev_n_neurons));
		}
	}

	/**
	 * Prepends the constant bias input (1.0) to the given vector.
	 * @param in input vector
	 * @return new vector of length {@code in.length + 1} whose first
	 *         element is 1.0 followed by the elements of {@code in}
	 */
	public static float[] add_bias(float[] in)
	{
		float out[] = new float[in.length + 1];
		// shift original values right by one; slot 0 holds the bias
		System.arraycopy(in, 0, out, 1, in.length);
		out[0] = 1.0f;
		return out;
	}

	/**
	 * Computes the output of the layer by activating every neuron
	 * with the given input vector.
	 * @param in input vector; its length must match the neurons' weight count
	 * @return array of neuron activations (internal buffer, not a copy)
	 */
	public float[] evaluate(float in[])
	{
		assert(getWeights(0).length == in.length);

		// stimulate each neuron of the layer and cache its output
		for (int i = 0; i < _n_neurons; ++i) {
			_outputs[i] = _neurons.get(i).activate(in);
		}
		return _outputs;
	}

	/** @return number of neurons in this layer */
	public int size() { return _n_neurons; }

	/**
	 * Returns the cached output of the i'th neuron, or the bias
	 * constant when the index is past the last neuron.
	 * @param i neuron index
	 * @return output computed by the last {@link #evaluate(float[])} call,
	 *         or {@code Neuron.BIAS} if {@code i} is out of range
	 */
	public float getOutput(int i) {
		return (i < _outputs.length) ? _outputs[i] : Neuron.BIAS;
	}

	/** @return derivative of the i'th neuron's activation at its last input */
	public float getActivationDerivative(int i) { return _neurons.get(i).getActivationDerivative(); }

	/**
	 * Returns a copy of the input weights of the i'th neuron.
	 * @param i neuron index
	 * @return copy of the weights, or null if {@code i} is out of range
	 */
	public float[] getWeights(int i) {
		// Explicit bounds check instead of catching a broad Exception and
		// printing a blank line (which silently swallowed the real error);
		// the null-on-failure contract is preserved.
		if (i < 0 || i >= _n_neurons) {
			return null;
		}
		return Arrays.copyOf(_neurons.get(i).getSynapticWeights(), _prev_n_neurons);
	}

	/**
	 * Returns input weights of all neurons at indicated range.
	 * All values are copied
	 * @param from start index of the range (inclusive)
	 * @param to end index of the range (exclusive)
	 * @return list where i'th element is all input weights of
	 * i'th neuron, or null if indicated range is not valid
	 */
	public List<float[]> getWeights(int from, int to) {

		// check boundaries
		if (from < 0 || to > _n_neurons) {
			return null;
		}

		// get copies of each neuron's weights
		List<float[]> weights = new ArrayList<float[]>(_n_neurons);
		for (int i = from; i < to; ++i) {
			weights.add(Arrays.copyOf(_neurons.get(i).getSynapticWeights(), _prev_n_neurons));
		}

		return weights;
	}

	/** @return the j'th input weight of the i'th neuron */
	public float getWeight(int i, int j) { return _neurons.get(i).getSynapticWeight(j); }
	/** Sets the j'th input weight of the i'th neuron to {@code v}. */
	public void setWeight(int i, int j, float v) { _neurons.get(i).setSynapticWeight(j, v); }
	/** Replaces all input weights of the i'th neuron. */
	public void setWeights(int i, float[] weights) { _neurons.get(i).setWeights(weights); }

	/**
	 * Adds given neuron to the end of the layer.
	 * Cached outputs from previous evaluations are reset.
	 * @param newNeuron neuron to add
	 */
	public void addNeuron(Neuron newNeuron) {
		_neurons.add(newNeuron);
		++_n_neurons;
		// BUGFIX: resize the output buffer, otherwise the next evaluate()
		// writes past the end of the old, smaller array
		_outputs = new float[_n_neurons];
	}

	/**
	 * Adds given neuron to the layer at indicated position.
	 * Cached outputs from previous evaluations are reset.
	 * @param i position of the new neuron
	 * @param newNeuron neuron to add
	 */
	public void addNeuron(int i, Neuron newNeuron) {
		_neurons.add(i, newNeuron);
		++_n_neurons;
		// BUGFIX: keep the output buffer in sync with the neuron count
		_outputs = new float[_n_neurons];
	}

	/**
	 * Removes the i'th neuron from the layer.
	 * Cached outputs from previous evaluations are reset.
	 * @param i index of neuron to remove
	 */
	public void removeNeuron(int i) {
		_neurons.remove(i);
		--_n_neurons;
		// BUGFIX: shrink the output buffer so getOutput() falls back to
		// Neuron.BIAS for indices past the new last neuron instead of
		// returning a stale cached value
		_outputs = new float[_n_neurons];
	}

	@Override
	public String toString() {
		return "#neurons = " + _n_neurons;
	}
}
