package neural;



/**
 * @author duncanst
 * See if I can take this sample code and make it do what I want it to do.
 * First task is to just understand how it works.
 * Second task is to start implementing it into our original code and to
 * replace the prediction portion.
 */
public class sbpn {
	public static enum Normalization { ARCTAN(-Math.PI/2, Math.PI/2), SIGMOID(0, 1), LINEAR(-1, 1);
		private final double min_out;
		private final double max_out;
		private Normalization(double min, double max) { min_out = min; max_out = max; }
		public double getMinOut() { return min_out; }
		public double getMaxOut() { return max_out; }
	}
	private double activationCorrection = 5.0;
	private double learningRate = 0.001;
	private double momentum = 0.9;
	private final Normalization myNorm;
	
	private int nInputs;
	private int nHidden;
	private int nOutput;
	private double[] inputsNeurons;
	private double[] hiddenNeurons;
	private double[] outputNeurons;
	private double[][] wInputsHidden;
	private double[][] wHiddenOutput;
	private double[][] deltaInputsHidden;
	private double[][] deltaHiddenOutput;
	private double[] outputErrorGradient;
	
	private int totalGenes;
	
	public sbpn(int in, int hidden, int out, Normalization norm) {
		//create neuron values (including a bias value)
		//--------------------------------------------------------------------------------
		inputsNeurons = new double[in+1];
		for(int i=0; i<in; i++) inputsNeurons[i] = 0;
		inputsNeurons[in] = -1; //bias neuron
		
		hiddenNeurons = new double[hidden+1];
		for(int j=0; j<hidden; j++) hiddenNeurons[j] = 0;
		hiddenNeurons[hidden] = -1; //bias neuron
		
		outputNeurons = new double[out];
		for(int k=0; k<out; k++) outputNeurons[k] = 0;
		
		//create neuron weights (including bias weights)
		//--------------------------------------------------------------------------------
		wInputsHidden = new double[in+1][];
		for(int i=0; i<=in; i++) {
			wInputsHidden[i] = new double[hidden];
			for(int j=0; j<hidden; j++)
				wInputsHidden[i][j] = Math.random() - 0.5;
		}
		
		wHiddenOutput = new double[hidden+1][];
		for(int j=0; j<=hidden; j++) {
			wHiddenOutput[j] = new double[out];
			for(int k=0; k<out; k++)
				wHiddenOutput[j][k] = Math.random() - 0.5;
		}
		
		//create delta lists
		//--------------------------------------------------------------------------------
		deltaInputsHidden = new double[in+1][];
		for(int j=0; j<=in; j++) {
			deltaInputsHidden[j] = new double[hidden];
			for(int k=0; k<hidden; k++)
				deltaInputsHidden[j][k] = 0;
		}
		
		deltaHiddenOutput = new double[hidden+1][];
		for(int i=0; i<=hidden; i++) {
			deltaHiddenOutput[i] = new double[out];
			for(int j=0; j<out; j++)
				deltaHiddenOutput[i][j] = 0;
		}
		//create oEG
		//--------------------------------------------------------------------------------
		outputErrorGradient = new double[out];
		for(int k=0; k<out; k++)
			outputErrorGradient[k] = 0;
		
		nInputs = in;
		nHidden = hidden;
		nOutput = out;
		myNorm = norm;
		
		totalGenes = wInputsHidden.length*wInputsHidden[0].length + wHiddenOutput.length*wHiddenOutput[0].length;
	}
	public double getLearningRate() { return learningRate; }
	public double getMomentum() { return momentum; }
	public void setLearningRate(double lr) { learningRate = lr; }
	public void setMomentum(double m) { momentum = m; }
	
	public double[] feedInput(double[] inputs) {
		feedForward(inputs);
		return outputNeurons;
	}
	
	public void train(double[] inputs, double[] desiredValues, boolean isBatch) {
		feedForward(inputs);
		backpropagate(desiredValues, isBatch);
		if(!isBatch) updateWeights(isBatch);
	}
	public void finishBatch() { updateWeights(true); }
	
	private void feedForward(double[] inputs) {
		for(int i=0; i<inputs.length; i++) inputsNeurons[i] = inputs[i];
		
		for(int j=0; j<nHidden; j++) {
			hiddenNeurons[j] = 0;
			for(int i=0; i<=nInputs; i++) hiddenNeurons[j] += inputsNeurons[i] * wInputsHidden[i][j];
			hiddenNeurons[j] = activationFunction(hiddenNeurons[j]);
		}
		
		for(int k=0; k<nOutput; k++) {
			outputNeurons[k] = 0;
			for(int j=0; j<=nHidden; j++) outputNeurons[k] += hiddenNeurons[j] * wHiddenOutput[j][k];
			outputNeurons[k] = activationFunction(outputNeurons[k]);
		}
	}
	private void backpropagate(double[] desiredValues, boolean isBatch) {
		for(int k=0; k<nOutput; k++) {
			outputErrorGradient[k] = getOutputErrorGradient(desiredValues[k], outputNeurons[k]);
			for(int j=0; j<=nHidden; j++)
				if(!isBatch) deltaHiddenOutput[j][k] = learningRate * hiddenNeurons[j] * outputErrorGradient[k] + momentum * deltaHiddenOutput[j][k];
				else deltaHiddenOutput[j][k] += learningRate * hiddenNeurons[j] * outputErrorGradient[k];
		}
		
		for(int j=0; j<nHidden; j++) {
			double hEG = getHiddenErrorGradient(j);
			for(int i=0; i<=nInputs; i++)
				if(!isBatch) deltaInputsHidden[i][j] = learningRate * inputsNeurons[i] * hEG + momentum * deltaInputsHidden[i][j];
				else deltaInputsHidden[i][j] += learningRate * inputsNeurons[i] * hEG;
		}
	}
	private void updateWeights(boolean isBatch) {
		for(int i=0; i<=nInputs; i++)
			for(int j=0; j<nHidden; j++) {
				wInputsHidden[i][j] += deltaInputsHidden[i][j];
				if(isBatch) deltaInputsHidden[i][j] = 0;
			}
		for(int j=0; j<=nHidden; j++)
			for(int k=0; k<nOutput; k++) {
				wHiddenOutput[j][k] += deltaHiddenOutput[j][k];
				if(isBatch) deltaHiddenOutput[j][k] = 0;
			}
	}
	
	private double activationFunction(double x) {
		if(myNorm == Normalization.SIGMOID)
			return 1 / (1 + Math.exp(-activationCorrection * x));
		else if(myNorm == Normalization.ARCTAN)
			return Math.atan(x);
		else //if(myNorm == Normalization.LINEAR)
			return x;
	}
	//derivative of the activation function
	private double getOutputErrorGradient(double desired, double actual) {
		if(myNorm == Normalization.SIGMOID)
			return actual * (1-actual) * (desired - actual);
		else if(myNorm == Normalization.ARCTAN)
			return (desired - actual) / (1 + actual*actual);
		else //if(myNorm == Normalization.LINEAR)
			return 1 * (desired - actual);
	}
	private double getHiddenErrorGradient(int j) {
		double sum = 0;
		for(int k=0; k<nOutput; k++) sum += wHiddenOutput[j][k] * outputErrorGradient[k];
		
		if(myNorm == Normalization.SIGMOID)
			return hiddenNeurons[j] * (1 - hiddenNeurons[j]) * sum;
		else if(myNorm == Normalization.ARCTAN)
			return sum / (1 + hiddenNeurons[j]*hiddenNeurons[j]);
		else //if(myNorm == Normalization.LINEAR)
			return 1 * sum;
	}
	
	
	private double norm_mutation = 0.3;
	private double norm_mutation_rate = 0.3;
	private double large_mutation = 4.0;
	private double large_mutation_rate = 0.02;
	public sbpn combineAndMutate(sbpn other) {
		sbpn ret = new sbpn(nInputs, nHidden, nOutput, myNorm);
		
		int total = totalGenes;
		for(int i=0; i<total; i++) {
			if(Math.random() < 0.5)
				ret.setGeneAt(i, getGeneAt(i));
			else
				ret.setGeneAt(i, other.getGeneAt(i));
			
			if(Math.random() < norm_mutation_rate)
				ret.setGeneAt(i, ret.getGeneAt(i) + ((Math.random()*2)-1)*norm_mutation);
			if(Math.random() < large_mutation_rate)
				ret.setGeneAt(i, ret.getGeneAt(i) + ((Math.random()*2)-1)*large_mutation);
		}
		return ret;
	}
	
	private double getGeneAt(int index) {
		if(index < 0 || index > totalGenes)
			return 0.0;
		if(index < wInputsHidden.length * wInputsHidden[0].length)
			return wInputsHidden[index/nHidden][index%nHidden];
		index -= wInputsHidden.length * wInputsHidden[0].length;
		return wHiddenOutput[index/nOutput][index%nOutput];
	}
	
	private void setGeneAt(int index, double val) {
		if(index < 0 || index > totalGenes)
			return;
		
		if(index < wInputsHidden.length * wInputsHidden[0].length)
			wInputsHidden[index/nHidden][index%nHidden] = val;
		else {
			index -= wInputsHidden.length * wInputsHidden[0].length;
			wHiddenOutput[index/nOutput][index%nOutput]= val;
		}
	}
}
