package pl.edu.agh.nn.grossberg;

import pl.edu.agh.nn.conf.InvalidNetworkConfigurationException;
import pl.edu.agh.nn.net.AbstractNetWithConfiguration;

/**
 * Grossberg (outstar) layer trained with a simple supervised rule.
 *
 * <p>The input {@code train} vector is expected to be one-hot: exactly one
 * component equal to {@code 1.0} selects the active input neuron, and only
 * that neuron's outgoing weight row is updated and used to produce the
 * output. The learning rate decays geometrically after every step.
 *
 * <p>NOTE(review): {@link #configure()} must be called before
 * {@link #learn(double[], double[])} or {@link #compute(double[])};
 * otherwise {@code weights} is {@code null}.
 */
public class GrossbergNet extends AbstractNetWithConfiguration {

	/** Learning stops once the decayed learning rate falls below this threshold. */
	private static final double MIN_LEARNING_RATE = 0.0001;

	/** Network settings: dimensions, activation function, learning-rate schedule. */
	private final GrossbergConfiguration configuration;

	/** weights[i][j] = weight from input neuron i to output neuron j. */
	private double[][] weights;

	/** Current (decaying) learning rate; reset by {@link #configure()}. */
	private double learningRate;

	/** Number of learning steps performed since the last {@link #configure()}. */
	private int currentStep;

	public GrossbergNet(GrossbergConfiguration configuration) {
		this.configuration = configuration;
	}

	/**
	 * Performs one learning step: selects the weight row of the active input
	 * neuron, updates it towards the desired {@code test} vector, and returns
	 * the activations computed from the pre-update weights.
	 *
	 * <p>Fix: previously the returned {@code output} array was only filled in
	 * delta-rule mode and came back all zeros otherwise; both branches now
	 * return the actual activations.
	 *
	 * @param train one-hot input pattern selecting the active input neuron
	 * @param test  desired output vector for that input
	 * @return the activation of each output neuron before the weight update
	 * @throws IllegalArgumentException if {@code train} has no 1.0 component
	 */
	@Override
	public double[] learn(double[] train, double[] test) {
		++currentStep;

		int index = findOne(train);
		double[] wi = weights[index];
		double[] output = new double[test.length];

		if (configuration.useDeltaRule()) {
			for (int j = 0; j < wi.length; ++j) {
				double actualOutput = configuration.getActivationFunction().compute(wi[j]);
				output[j] = actualOutput;
				// NOTE(review): the derivative is evaluated at the OUTPUT value,
				// which assumes the activation's derivative is expressed in terms
				// of its output (as for the logistic function) — confirm.
				wi[j] += learningRate * (test[j] - actualOutput)
						* configuration.getActivationFunction().deriveComputation(actualOutput);
			}
		} else {
			for (int j = 0; j < wi.length; ++j) {
				double actualOutput = configuration.getActivationFunction().compute(wi[j]);
				output[j] = actualOutput;
				wi[j] += learningRate * (test[j] - actualOutput);
			}
		}
		learningRate *= configuration.getLearningRateReduction();
		return output;
	}

	/**
	 * Returns the index of the first component equal to 1.0.
	 *
	 * <p>Fix: previously returned -1 when no such component existed, which made
	 * the callers crash with an opaque {@code ArrayIndexOutOfBoundsException}
	 * on {@code weights[-1]}; now fails fast with a descriptive exception.
	 *
	 * @throws IllegalArgumentException if no component equals 1.0
	 */
	private int findOne(double[] pattern) {
		for (int i = 0; i < pattern.length; ++i) {
			if (pattern[i] == 1.0) {
				return i;
			}
		}
		throw new IllegalArgumentException("input pattern has no component equal to 1.0 (expected a one-hot vector)");
	}

	/**
	 * Computes the network output for a one-hot input: the activated weight
	 * row of the active input neuron.
	 *
	 * @throws IllegalArgumentException if {@code input} has no 1.0 component
	 */
	@Override
	public double[] compute(double[] input) {
		return activate(weights[findOne(input)]);
	}

	/** Applies the configured activation function element-wise. */
	private double[] activate(double[] ds) {
		double[] ret = new double[ds.length];
		for (int i = 0; i < ret.length; ++i) {
			ret[i] = configuration.getActivationFunction().compute(ds[i]);
		}
		return ret;
	}

	/**
	 * (Re)initializes the net: random weights uniformly drawn from
	 * [minimumRandomNumber, maximumRandomNumber], the initial learning rate,
	 * and the step counter.
	 */
	@Override
	public void configure() throws InvalidNetworkConfigurationException {
		this.weights = new double[configuration.getInputDimension()][];

		double min = configuration.getMinimumRandomNumber();
		double max = configuration.getMaximumRandomNumber();
		for (int i = 0; i < weights.length; ++i) {
			double[] wi = new double[configuration.getNeuronCount()];
			for (int j = 0; j < wi.length; ++j) {
				wi[j] = min + Math.random() * (max - min);
			}
			weights[i] = wi;
		}
		this.learningRate = configuration.getInitialLearningRate();
		this.currentStep = 0;
	}

	/**
	 * Learning terminates after the configured number of epochs or once the
	 * decayed learning rate becomes negligibly small.
	 */
	@Override
	public boolean isLearningFinished() {
		return currentStep > configuration.getMaxEpochs() || learningRate < MIN_LEARNING_RATE;
	}

}
