package nn.layers;

import java.util.List;

import javax.management.RuntimeErrorException;

import nn.Neuron;

/**
 * A neural-network layer trained with error backpropagation.
 *
 * <p>Caches the raw (pre-activation) neuron values of the last forward pass so
 * the activation derivative can be evaluated during the backward pass, and
 * stores per-neuron error terms used to compute weight deltas.
 *
 * <p>Not thread-safe: forward/backward passes mutate shared per-layer buffers.
 */
public class BPLayer extends Layer {
	/** Per-neuron backpropagated error term (delta). */
	final float[] error;
	/** Raw (pre-activation, pre-bias) neuron values cached by the last forward pass. */
	final float[] raw;
	/** Activated outputs of the last forward pass. */
	final float[] layerResult;

	/**
	 * Creates a layer with the superclass default activation function.
	 *
	 * @param neuronNum number of neurons in this layer
	 */
	public BPLayer(int neuronNum) {
		super(neuronNum);
		error = new float[neuronNum];
		raw = new float[neuronNum];
		layerResult = new float[neuronNum];
	}

	/**
	 * Creates a layer with a named activation function.
	 *
	 * @param neuronNum number of neurons in this layer
	 * @param actFunction activation function name, interpreted by the superclass
	 */
	public BPLayer(int neuronNum, String actFunction) {
		super(neuronNum, actFunction);
		error = new float[neuronNum];
		raw = new float[neuronNum];
		layerResult = new float[neuronNum];
	}

	/**
	 * Runs the forward pass: for each neuron computes the raw weighted sum of
	 * the previous layer's outputs, adds the optional bias, and applies the
	 * activation function. Raw values and activated outputs are cached for the
	 * backward pass.
	 *
	 * @param prevLayerResults activated outputs of the previous layer
	 * @return activated outputs of this layer, one entry per neuron
	 * @throws IllegalStateException if the declared layer size disagrees with
	 *             the actual neuron count
	 */
	public float[] getResults(float[] prevLayerResults) {
		if (size != neuronList.size()) {
			throw new IllegalStateException(
					"layer size " + size + " does not match neuron count " + neuronList.size());
		}
		float[] results = new float[size];
		int it = 0;
		for (Neuron neuron : neuronList) {
			// cache the raw value; the backward pass needs it for the derivative
			raw[it] = neuron.getRawValue(prevLayerResults);
			float val = raw[it];
			// bias is optional per neuron (null means "no bias")
			Float bias = neuron.getBias();
			if (bias != null) {
				val += bias;
			}
			results[it] = actFunc.calculateValue(val);
			layerResult[it] = results[it];
			++it;
		}
		return results;
	}

	/**
	 * Computes the error terms treating this as the output layer.
	 *
	 * <p>error = f'(raw) * (expected - actual)
	 *
	 * @param output the expected (target) output values, one per neuron
	 * @throws IllegalArgumentException if {@code output} length does not match
	 *             the neuron count
	 */
	public void setErrorAsLastLayer(float[] output) {
		if (output.length != neuronList.size()) {
			throw new IllegalArgumentException("expected output length " + output.length
					+ " does not match layer size " + neuronList.size());
		}
		for (int neuronIt = 0; neuronIt < neuronList.size(); ++neuronIt) {
			// error = Derivative (weights * inputs) * (expected - calculated)
			error[neuronIt] = actFunc.calculateDerivative(raw[neuronIt]) * (output[neuronIt] - layerResult[neuronIt]);
		}
	}

	/**
	 * Computes the error terms treating this as a hidden layer, by propagating
	 * the next layer's errors back through the transposed weight matrix.
	 *
	 * <p>error[i] = f'(raw[i]) * sum_j(nextError[j] * weight[i][j])
	 *
	 * @param nextLayerError error terms of the next (downstream) layer
	 * @param revertedWeights transposed weights: one array per neuron of this
	 *            layer, each holding that neuron's outgoing weights
	 * @throws IllegalArgumentException if {@code revertedWeights} size does not
	 *             match this layer's size
	 */
	public void setError(float[] nextLayerError, List<float[]> revertedWeights) {
		if (size != revertedWeights.size()) {
			throw new IllegalArgumentException("layer size is different from revertedWeights size " + size + "/"
					+ revertedWeights.size());
		}
		for (int neuronIt = 0; neuronIt < size; ++neuronIt) {
			float sum = 0;
			float[] revertedWeight = revertedWeights.get(neuronIt);
			for (int nextLayerIt = 0; nextLayerIt < nextLayerError.length; ++nextLayerIt) {
				sum += nextLayerError[nextLayerIt] * revertedWeight[nextLayerIt];
			}
			error[neuronIt] = actFunc.calculateDerivative(raw[neuronIt]) * sum;
		}
	}

	/**
	 * Returns a defensive copy of the current per-neuron error terms.
	 *
	 * @return copy of the error array
	 */
	public float[] getError() {
		return error.clone();
	}

	/**
	 * Returns a defensive copy of the activated outputs of the last forward pass.
	 *
	 * @return copy of the layer result array
	 */
	public float[] getResults() {
		return layerResult.clone();
	}

	/**
	 * Computes new weight (and bias) deltas for every neuron from the current
	 * error terms, keeping the previous deltas for the momentum term.
	 *
	 * <p>delta = learnSpeed * error * input + momentum * previousDelta
	 *
	 * @param learnSpeed learning rate
	 * @param momentum momentum factor applied to the previous delta
	 * @param input the inputs that fed this layer in the forward pass
	 * @throws IllegalArgumentException if {@code input} is shorter than a
	 *             neuron's weight vector
	 */
	public void calculateDelta(float learnSpeed, float momentum, float[] input) {
		for (int neuronIt = 0; neuronIt < size; ++neuronIt) {
			Neuron neuron = neuronList.get(neuronIt);
			float[] currDelta = neuron.getDeltaWeights();
			float[] prevDelta = neuron.getPrevDeltaWeights();
			int weightsLength = currDelta.length;
			if (input.length < weightsLength) {
				throw new IllegalArgumentException("input length " + input.length
						+ " is smaller than weight count " + weightsLength);
			}
			float[] newDelta = new float[weightsLength];
			for (int weightIt = 0; weightIt < weightsLength; ++weightIt) {
				newDelta[weightIt] = learnSpeed * error[neuronIt] * input[weightIt] + momentum * prevDelta[weightIt];
			}
			// current deltas become the "previous" ones for the next momentum step
			neuron.setPrevDeltaWeights(currDelta);
			neuron.setDeltaWeights(newDelta);

			Float bias = neuron.getBias();
			if (bias != null) {
				float currBiasDelta = neuron.getBiasDelta();
				float prevBiasDelta = neuron.getPrevBiasDelta();
				// bias behaves like a weight with a constant input of 1
				neuron.setBiasDelta(learnSpeed * error[neuronIt] + momentum * prevBiasDelta);
				neuron.setPrevBiasDelta(currBiasDelta);
			}
		}
	}

	/**
	 * @deprecated misspelled name kept for backward compatibility; use
	 *             {@link #calculateDelta(float, float, float[])} instead.
	 */
	@Deprecated
	public void celculateDelta(float learnSpeed, float momentum, float[] input) {
		calculateDelta(learnSpeed, momentum, input);
	}

	/**
	 * Applies the previously computed deltas to every neuron's weights and,
	 * when present, its bias.
	 */
	public void updateNeuronWeights() {
		for (Neuron neuron : neuronList) {
			float[] delta = neuron.getDeltaWeights();
			float[] weights = neuron.getWeights();
			for (int i = 0; i < delta.length; ++i) {
				weights[i] += delta[i];
			}
			neuron.setWeights(weights);

			Float bias = neuron.getBias();
			if (bias != null) {
				neuron.setBias(bias + neuron.getBiasDelta());
			}
		}
	}

}
