package layeredneuralnetwork;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class Neuron {

	/** Raw weighted-sum input to this neuron (pre-activation). NaN when cleared. */
	private double currentValue;
	/** Activated output, i.e. ActivationFunction(currentValue). NaN when cleared. */
	private double outputValue;
	/** Connections feeding into this neuron (this neuron is the OutputNeuron of each). */
	public List<Connection> InputConnections;
	/** Connections leaving this neuron (this neuron is the InputNeuron of each). */
	public List<Connection> OutputConnections;

	/**
	 * Creates a neuron with empty connection lists and NaN input/output values
	 * (values are undefined until setValue/Calculate is called).
	 */
	public Neuron()
	{
		this.InputConnections = new ArrayList<Connection>();
		this.OutputConnections = new ArrayList<Connection>();
		ClearValue();
	}

	/**
	 * Creates a neuron with a fixed output (e.g. a bias neuron). Note the
	 * activation function is deliberately NOT applied: both the input and
	 * output are set to the given constant.
	 *
	 * @param output the constant value this neuron exposes as its output
	 */
	public Neuron(double output)
	{
		this(); // reuse default initialisation instead of duplicating it
		this.outputValue = output;
		this.currentValue = output;
	}

	/** Resets input and output to NaN, marking this neuron as not-yet-computed. */
	public void ClearValue()
	{
		currentValue = Double.NaN;
		outputValue = Double.NaN;
	}

	/**
	 * Sets this neuron's input directly (used for input-layer neurons) and
	 * recomputes the activated output from it.
	 *
	 * @param value the raw input value
	 */
	public void setValue(double value)
	{
		this.currentValue = value;
		this.outputValue = ActivationFunction(currentValue);
	}

	/**
	 * Forward pass: sums the activated outputs of all input neurons, weighted
	 * by their connection weights, then applies the activation function.
	 * Assumes all input neurons have already been calculated.
	 */
	public void Calculate()
	{
		double sum = 0;
		for (int i = 0; i < InputConnections.size(); i++)
		{
			Connection connection = InputConnections.get(i);
			Neuron inputNeuron = connection.InputNeuron;
			sum += inputNeuron.getOuput() * connection.Weighting.getWeight();
		}
		currentValue = sum;
		outputValue = ActivationFunction(currentValue);
	}

	/**
	 * Forward pass that does NOT mutate this neuron's state. For each input
	 * neuron, uses the tentative value from {@code prevLayer} when present,
	 * otherwise falls back to the neuron's stored output.
	 *
	 * @param prevLayer tentative (input, output) pairs for the previous layer
	 * @return the would-be (input, output) pair for this neuron
	 */
	public NeuronOutputPair TempCalculate(HashMap<Neuron, NeuronOutputPair> prevLayer)
	{
		double sum = 0;
		for (int i = 0; i < InputConnections.size(); i++)
		{
			Connection connection = InputConnections.get(i);
			Neuron inputNeuron = connection.InputNeuron;
			NeuronOutputPair prevValue = prevLayer.get(inputNeuron);
			if (prevValue != null)
				sum += prevValue.OutputValue * connection.Weighting.getWeight();
			else
				sum += inputNeuron.getOuput() * connection.Weighting.getWeight();
		}
		return new NeuronOutputPair(sum, ActivationFunction(sum));
	}

	/**
	 * First-order backpropagation step for this neuron.
	 * Converts the error gradient w.r.t. this neuron's output (dErr/dx) into
	 * the gradient w.r.t. its input (dErr/dy) via the activation derivative,
	 * and accumulates dErr/dw = input * dErr/dy for each incoming weight.
	 *
	 * @param dErr_wrt_dx   gradient of the error w.r.t. this neuron's output
	 * @param dErr_wrt_dw   shared accumulator of per-weight gradients (updated in place)
	 * @return the gradient of the error w.r.t. this neuron's input
	 */
	public double BackPropagate(double dErr_wrt_dx, HashMap<Weight, RefDouble> dErr_wrt_dw)
	{
		double dErr_wrt_dy = ActivationDerivativeFunction(currentValue) * dErr_wrt_dx;
		Weight tempWeight;
		for (int i = 0; i < InputConnections.size(); i++)
		{
			Connection con = InputConnections.get(i);
			tempWeight = con.Weighting;
			RefDouble temp = dErr_wrt_dw.get(tempWeight);
			if (temp == null)
			{
				dErr_wrt_dw.put(tempWeight, new RefDouble(con.InputNeuron.getOuput() * dErr_wrt_dy, 1));
			}
			else
			{
				temp.value += con.InputNeuron.getOuput() * dErr_wrt_dy;
				temp.Count++;
			}
		}
		return dErr_wrt_dy;
	}

	/**
	 * Second-order (Levenberg-Marquardt style) backpropagation step, following
	 * the diagonal-Hessian approximation of the CodeProject article referenced
	 * below: both the activation derivative and the input term are squared.
	 *
	 * @param d2Err_wrt_d2x  second derivative of the error w.r.t. this neuron's output
	 * @param d2Err_wrt_d2w  shared accumulator of per-weight curvatures (updated in place)
	 * @return the second derivative of the error w.r.t. this neuron's input
	 */
	public double SecondOrderBackPropagate(double d2Err_wrt_d2x, HashMap<Weight, RefDouble> d2Err_wrt_d2w)
	{
		double d2Err_wrt_d2y = Math.pow(ActivationDerivativeFunction(currentValue), 2) * d2Err_wrt_d2x;
		Weight tempWeight;
		for (int i = 0; i < InputConnections.size(); i++)
		{
			Connection con = InputConnections.get(i);
			tempWeight = con.Weighting;
			RefDouble temp = d2Err_wrt_d2w.get(tempWeight);
			if (temp == null)
			{
				d2Err_wrt_d2w.put(tempWeight, new RefDouble(Math.pow(con.InputNeuron.getOuput(), 2) * d2Err_wrt_d2y));
			}
			else
			{
				// BUG FIX: the accumulation branch must square the input term just
				// like the first-insertion branch above; previously it added the
				// unsquared output, making the result depend on insertion order.
				temp.value += Math.pow(con.InputNeuron.getOuput(), 2) * d2Err_wrt_d2y;
			}
		}
		return d2Err_wrt_d2y;
	}

	/**
	 * Gathers dErr/dx for this neuron from the next layer:
	 * sum over outgoing connections of weight * dErr/dy(next neuron).
	 * Connections whose output neuron is absent from the map contribute nothing.
	 *
	 * @param dErr_wrt_dy per-neuron input gradients of the next layer
	 * @return the error gradient w.r.t. this neuron's output
	 */
	public double GetdErr_wrt_dx(HashMap<Neuron, RefDouble> dErr_wrt_dy)
	{
		double dErr_wrt_dx = 0;
		for (int i = 0; i < OutputConnections.size(); i++)
		{
			Connection con = OutputConnections.get(i);
			RefDouble val = dErr_wrt_dy.get(con.OutputNeuron);
			if (val != null)
				dErr_wrt_dx += con.Weighting.getWeight() * val.value;
		}
		return dErr_wrt_dx;
	}

	/**
	 * Output-layer variant: dErr/dx for a squared-error loss is simply
	 * (actual output - target).
	 *
	 * @param target the desired output value
	 * @return the error gradient w.r.t. this neuron's output
	 */
	public double GetdErr_wrt_dx(double target)
	{
		return this.outputValue - target;
	}

	/**
	 * Gathers d2Err/dx2 for this neuron from the next layer:
	 * sum over outgoing connections of weight^2 * d2Err/dy2(next neuron).
	 *
	 * @param d2Err_wrt_d2y per-neuron input curvatures of the next layer
	 * @return the second derivative of the error w.r.t. this neuron's output
	 */
	public double Getd2Err_wrt_d2x(HashMap<Neuron, RefDouble> d2Err_wrt_d2y)
	{
		double d2Err_wrt_d2x = 0;
		for (int i = 0; i < OutputConnections.size(); i++)
		{
			Connection con = OutputConnections.get(i);
			RefDouble val = d2Err_wrt_d2y.get(con.OutputNeuron);
			if (val != null)
				d2Err_wrt_d2x += Math.pow(con.Weighting.getWeight(), 2) * val.value;
		}
		return d2Err_wrt_d2x;
	}

	/**
	 * Output-layer variant: for squared error the second derivative w.r.t.
	 * the output is the constant 1 (the target is unused by construction).
	 *
	 * @param target the desired output value (unused)
	 * @return 1
	 */
	public double Getd2Err_wrt_d2x(double target)
	{
		return 1;
	}

	/**
	 * Returns the activated output value.
	 *
	 * @deprecated misspelled; retained for backward compatibility — prefer {@link #getOutput()}.
	 */
	@Deprecated
	public double getOuput()
	{
		return getOutput();
	}

	/** @return the activated output value */
	public double getOutput()
	{
		return outputValue;
	}

	/** @return the raw (pre-activation) input value */
	public double getInput()
	{
		return currentValue;
	}

	/**
	 * Sets the output directly without applying the activation function.
	 * Used for bias neurons.
	 *
	 * @param output the constant output value
	 */
	public void setOutput(double output)
	{
		this.outputValue = output;
	}

	/**
	 * Sets both the raw input and the activated output directly, bypassing the
	 * activation function (e.g. when restoring a previously computed state).
	 *
	 * @param current the raw input value
	 * @param output  the activated output value
	 */
	public void setValues(double current, double output)
	{
		this.currentValue = current;
		this.outputValue = output;
	}

	//The scaled-tanh variant (scaling value 1.7159) is described in:
	//http://www.codeproject.com/Articles/16650/Neural-Network-for-Recognition-of-Handwritten-Digi#ConvolutionalStructure

	/**
	 * Activation function: hyperbolic tangent, squashing input into (-1, 1).
	 *
	 * @param input the raw weighted-sum input
	 * @return tanh(input)
	 */
	public double ActivationFunction(double input)
	{
		return Math.tanh(input);
	}

	/**
	 * Derivative of the activation function: d/dx tanh(x) = 1 - tanh(x)^2.
	 *
	 * @param input the raw weighted-sum input
	 * @return the derivative of the activation at {@code input}
	 */
	public double ActivationDerivativeFunction(double input)
	{
		double temp = ActivationFunction(input);
		return 1 - temp * temp;
	}
}
