import java.io.Serializable;
/**
 * A single neuron in a feed-forward neural network.
 *
 * <p>Holds its activation threshold (bias), links to its neighbouring neurons and
 * synapses, and implements forward propagation ({@link #updateOutput()}) plus
 * back-propagation training in both incremental (per-pattern) and batch
 * (accumulated) modes, with a momentum term.
 *
 * <p>NOTE(review): instances share a static id counter that is not thread-safe;
 * construct networks from a single thread.
 */
public class FFNeuron implements Serializable {
	/** Fixed UID so serialized networks survive recompilation of this class. */
	private static final long serialVersionUID = 1L;

	/** Running counter used to hand out unique ids across all neurons. */
	private static int count;
	/** Unique id of this neuron (assigned from {@link #count}). */
	public int id;
	/** Bias term; subtracted from the weighted input sum before the transfer function. */
	protected double threshold;
	/** Threshold value from the previous update, used by the momentum term. */
	protected double prevthreshold;
	/** Index of the layer this neuron belongs to (0 for the default constructor). */
	protected int layer;
	/** Last computed activation output of this neuron. */
	protected double output;
	/** Transfer (activation) function applied to the net input. */
	protected TransferFunction transFunc;
	/** Momentum coefficient applied to the previous weight change. */
	protected double momentumRate;
	/** Steepness parameter of the sigmoid-family transfer functions. */
	protected double axonFlatness;
	/** Per-neuron multiplier on the global learning rate. */
	protected double learningRateCoefficient;
	/** Neurons this neuron feeds into (next layer). */
	protected FFNeuron[] neuronsOut;
	/** Neurons feeding into this neuron (previous layer); parallel to {@link #synapsesIn}. */
	protected FFNeuron[] neuronsIn;
	/** Outgoing synapses; parallel to {@link #neuronsOut}. */
	protected FFSynapse[] synapsesOut;
	/** Incoming synapses; parallel to {@link #neuronsIn}. */
	protected FFSynapse[] synapsesIn;
	/** Back-propagated error (delta) for this neuron. */
	protected double error;
	/** Accumulated threshold change for batch-mode training. */
	protected double cumulthresholddiff;

	/** Creates a neuron with a fresh unique id in layer 0. */
	public FFNeuron() {
		this.id = count++;
		this.layer = 0;
	}

	/**
	 * Creates a fully configured neuron with a randomly initialised threshold.
	 *
	 * @param layer                   layer index this neuron belongs to
	 * @param axonFlatness            steepness parameter of the transfer function
	 * @param transFunc               transfer (activation) function to use
	 * @param momentumRate            momentum coefficient for weight updates
	 * @param learningRateCoefficient per-neuron learning-rate multiplier
	 */
	public FFNeuron(int layer, double axonFlatness, TransferFunction transFunc, double momentumRate, double learningRateCoefficient) {
		this(); // assigns the unique id
		output = 0;
		this.transFunc = transFunc;
		// Random initial bias in [-1, 1); the "previous" value starts equal so the
		// first momentum term is zero.
		threshold = RandGen.uniform(-1, 1);
		prevthreshold = threshold;
		// (Removed bug: the original contained "this.id = id;", a self-assignment
		// no-op — the id is already set by the this() call above.)
		this.layer = layer;
		this.momentumRate = momentumRate;
		this.axonFlatness = axonFlatness;
		this.learningRateCoefficient = learningRateCoefficient;
		cumulthresholddiff = 0;
	}

	/**
	 * Wires this neuron into the network.
	 *
	 * <p>The neuron arrays must be parallel to the corresponding synapse arrays:
	 * {@code synapsesIn[i]} connects {@code neuronsIn[i]} to this neuron, and
	 * likewise for the outgoing side.
	 *
	 * @param neuronsIn   neurons in the previous layer feeding this neuron
	 * @param neuronsOut  neurons in the next layer this neuron feeds
	 * @param synapsesIn  incoming synapses, parallel to {@code neuronsIn}
	 * @param synapsesOut outgoing synapses, parallel to {@code neuronsOut}
	 */
	public void setLinkings(FFNeuron[] neuronsIn, FFNeuron[] neuronsOut, FFSynapse[] synapsesIn, FFSynapse[] synapsesOut) {
		this.neuronsIn = neuronsIn;
		this.neuronsOut = neuronsOut;
		this.synapsesIn = synapsesIn;
		this.synapsesOut = synapsesOut;
	}

	/**
	 * Forward pass: computes this neuron's output from its inputs.
	 *
	 * <p>Net input is the weighted sum of the incoming neurons' outputs minus the
	 * threshold; the configured transfer function then maps it to {@link #output}.
	 */
	public void updateOutput() {
		double activation = 0;
		for (int i = 0; i < neuronsIn.length; i++) {
			activation += neuronsIn[i].output * synapsesIn[i].weight;
		}
		activation -= threshold; // threshold acts as a bias with fixed input -1
		switch (transFunc) {
			case LOGISTIC:
				output = 1 / (1 + Math.exp(-activation / axonFlatness));
				break;
			case TANH:
				// Scaled logistic equivalent of tanh(activation / (2 * axonFlatness)).
				output = (2 / (1 + Math.exp(-activation / axonFlatness))) - 1;
				break;
			case LINEAR:
				output = activation;
				break;
		}
	}

	/**
	 * Back-propagation step for an output-layer neuron.
	 *
	 * @param rate         global learning rate
	 * @param target       desired output for the current training pattern
	 * @param learningMode BATCH accumulates weight changes; INCREMENTAL applies them now
	 */
	public void trainOutputLayer(double rate, double target, LearningMode learningMode) {
		this.error = (target - output) * getDerivative();
		if (learningMode == LearningMode.BATCH)
			batchCumulateWeights(rate);
		else if (learningMode == LearningMode.INCREMENTAL)
			incrementalUpdateWeights(rate);
	}

	/**
	 * Back-propagation step for a hidden-layer neuron.
	 *
	 * <p>The error is the derivative-weighted sum of the downstream errors. In
	 * incremental mode the downstream weights were already updated this pass, so
	 * the pre-update {@code previousWeight} is used; in batch mode the current
	 * weights are still the pre-update values.
	 *
	 * @param rate         global learning rate
	 * @param learningMode BATCH accumulates weight changes; INCREMENTAL applies them now
	 */
	public void trainHiddenLayer(double rate, LearningMode learningMode) {
		double temp_diff = 0;
		for (int i = 0; i < neuronsOut.length; i++) {
			if (learningMode == LearningMode.INCREMENTAL)
				temp_diff += neuronsOut[i].error * synapsesOut[i].previousWeight;
			else if (learningMode == LearningMode.BATCH)
				temp_diff += neuronsOut[i].error * synapsesOut[i].weight;
		}
		error = temp_diff * getDerivative();
		if (learningMode == LearningMode.INCREMENTAL)
			incrementalUpdateWeights(rate);
		else if (learningMode == LearningMode.BATCH)
			batchCumulateWeights(rate);
	}

	/**
	 * Applies the delta rule with momentum to all incoming weights and the
	 * threshold immediately (incremental / online mode).
	 *
	 * <p>Also clears any batch accumulators so a mode switch starts clean.
	 *
	 * @param rate global learning rate
	 */
	private void incrementalUpdateWeights(double rate) {
		double temp_weight;
		for (int i = 0; i < synapsesIn.length; i++) {
			temp_weight = synapsesIn[i].weight;
			synapsesIn[i].weight +=
				(rate * learningRateCoefficient * error * neuronsIn[i].output)
				+ (momentumRate * (synapsesIn[i].weight - synapsesIn[i].previousWeight));
			synapsesIn[i].previousWeight = temp_weight;
			if (synapsesIn[i].accuWeightDiff != 0) { synapsesIn[i].accuWeightDiff = 0; }
		}
		// The threshold behaves like a weight on a constant -1 input.
		temp_weight = threshold;
		threshold += (rate * learningRateCoefficient * error * -1) + (momentumRate * (threshold - prevthreshold));
		prevthreshold = temp_weight;
		if (cumulthresholddiff != 0) { cumulthresholddiff = 0; }
	}

	/**
	 * Accumulates this pattern's weight/threshold deltas for batch-mode training;
	 * {@link #batchUpdateWeights(int)} later applies the averages.
	 *
	 * @param rate global learning rate
	 */
	private void batchCumulateWeights(double rate) {
		// (Removed an unused local variable from the original.)
		for (int i = 0; i < synapsesIn.length; i++) {
			synapsesIn[i].accuWeightDiff += rate * learningRateCoefficient * error * neuronsIn[i].output;
		}
		cumulthresholddiff += rate * learningRateCoefficient * error * -1;
	}

	/**
	 * Applies the averaged accumulated deltas (with momentum) and resets the
	 * accumulators. Call once per epoch in batch mode.
	 *
	 * @param noofepochs divisor used to average the accumulated deltas
	 *                   (NOTE(review): callers appear to pass the pattern count per
	 *                   epoch — confirm against the training loop)
	 */
	public void batchUpdateWeights(int noofepochs) {
		double temp_weight;
		for (int i = 0; i < synapsesIn.length; i++) {
			temp_weight = synapsesIn[i].weight;
			synapsesIn[i].weight += (synapsesIn[i].accuWeightDiff / noofepochs) + (momentumRate * (synapsesIn[i].weight - synapsesIn[i].previousWeight));
			synapsesIn[i].previousWeight = temp_weight;
			synapsesIn[i].accuWeightDiff = 0;
		}
		temp_weight = threshold;
		threshold += (cumulthresholddiff / noofepochs) + (momentumRate * (threshold - prevthreshold));
		prevthreshold = temp_weight;
		cumulthresholddiff = 0;
	}

	/**
	 * Derivative of the transfer function at the current {@link #output},
	 * expressed in terms of the output itself (standard back-prop form).
	 *
	 * @return the derivative value; 0 if no transfer function is set
	 */
	public double getDerivative() {
		double temp;
		switch (transFunc) {
			case LOGISTIC:
				temp = (output * (1 - output)) / axonFlatness;
				break;
			case TANH:
				temp = (1 - Math.pow(output, 2)) / (2 * axonFlatness);
				break;
			case LINEAR:
				temp = 1;
				break;
			default:
				temp = 0;
				break;
		}
		return temp;
	}
}
