package weka.classifiers.neural.backpropagation;

import java.io.Serializable;

import weka.classifiers.neural.common.transfer.*;

public class BpModel implements Serializable {

	private static final long serialVersionUID = -6894197958878929685L;
	// Network topology as a jagged array: network[layer][node].
	// Layer 0 is the input layer; the last layer is the output layer.
	public Neuron[][] network;
	// Activation (transfer) function shared by all non-input neurons.
	private TransferFunction function;

	/**
	 * Builds the network topology and initializes all neurons.
	 *
	 * @param inputNum     number of input-layer nodes
	 * @param outputNum    number of output-layer nodes
	 * @param numEachLayer number of nodes in each hidden layer; must have length >= 1
	 * @param seed         random seed used when initializing neuron weights
	 * @param selection    transfer-function id passed to TransferFunctionFactory
	 * @throws Exception if no hidden layer is specified
	 */
	public BpModel(int inputNum, int outputNum, int[] numEachLayer, long seed,
			int selection) throws Exception {

		// Number of hidden layers; at least one is required.
		int layersNum = numEachLayer.length;
		if (layersNum < 1)
			throw new Exception("至少有一个中间层！");
		function = TransferFunctionFactory.factory(selection);
		Neuron.initRandom(seed);
		// layersNum hidden layers plus input and output layers.
		network = new Neuron[layersNum + 2][];
		// Input layer: neurons with no incoming weights.
		network[0] = new Neuron[inputNum];
		for (int i = 0; i < inputNum; i++)
			network[0][i] = new Neuron(0, 0, i);
		// Hidden layers: each neuron carries one weight per node of the
		// previous layer (the input layer for i == 0).
		for (int i = 0; i < layersNum; i++) {
			network[i + 1] = new Neuron[numEachLayer[i]];
			int prevSize = (i == 0) ? inputNum : numEachLayer[i - 1];
			for (int j = 0; j < numEachLayer[i]; j++)
				network[i + 1][j] = new Neuron(prevSize, i + 1, j);
		}
		// Output layer: fed by the last hidden layer.
		network[layersNum + 1] = new Neuron[outputNum];
		for (int i = 0; i < outputNum; i++)
			network[layersNum + 1][i] = new Neuron(numEachLayer[layersNum - 1],
					layersNum + 1, i);
	}

	/**
	 * Trains the network on a single example: forward pass, error
	 * back-propagation, then weight and bias update.
	 *
	 * @param learningRate step size for the weight/bias updates
	 * @param inputs       input-layer values; length must match the input layer
	 * @param outputs      target output values; length must match the output layer
	 * @throws Exception if the array lengths do not match the network topology
	 */
	public void caseTraining(double learningRate, double[] inputs,
			double[] outputs) throws Exception {
		if (network[0].length != inputs.length
				|| network[network.length - 1].length != outputs.length)
			throw new Exception("训练数据不符合神经网络拓扑结构！");

		forwardPropagate(inputs);

		// Back-propagate errors, starting with the output layer:
		// err = (target - actual) * f'(input).
		for (int i = 0; i < network[network.length - 1].length; i++) {
			Neuron current = network[network.length - 1][i];
			double realErr = outputs[i] - current.getOutput();
			current.setErr(realErr
					* function.derivative(current.getInput(), current.getOutput()));
		}
		// Hidden layers, from the last one back to the first:
		// err = sum(downstream err * connecting weight) * f'(input).
		for (int i = network.length - 2; i >= 1; i--) {
			for (int j = 0; j < network[i].length; j++) {
				Neuron current = network[i][j];
				double totalE = 0;
				for (int k = 0; k < network[i + 1].length; k++) {
					totalE += network[i + 1][k].getErr()
							* network[i + 1][k].getInputWeights()[j];
				}
				// FIX: setErr was previously called inside the loop above,
				// recomputing the derivative on every iteration; only the
				// final accumulated error matters, so set it once here.
				current.setErr(totalE
						* function.derivative(current.getInput(), current.getOutput()));
			}
		}

		// Update weights and biases, from the output layer back to the first
		// hidden layer: w += rate * err * upstreamOutput; bias += rate * err.
		for (int i = network.length - 1; i >= 1; i--) {
			for (int j = 0; j < network[i].length; j++) {
				Neuron current = network[i][j];
				for (int k = 0; k < network[i - 1].length; k++) {
					double deltaW = learningRate * current.getErr()
							* network[i - 1][k].getOutput();
					current.getInputWeights()[k] += deltaW;
				}
				double deltaBias = learningRate * current.getErr();
				current.setBias(current.getBias() + deltaBias);
			}
		}
	}

	/**
	 * Classifies a single example with a forward pass through the network.
	 *
	 * @param inputs input-layer values; length must match the input layer
	 * @return the output-layer activations
	 * @throws Exception if the input length does not match the network topology
	 */
	public double[] classifyCase(double[] inputs) throws Exception {
		if (network[0].length != inputs.length)
			throw new Exception("训练数据不符合神经网络拓扑结构！");

		forwardPropagate(inputs);

		double[] outputs = new double[network[network.length - 1].length];
		for (int i = 0; i < outputs.length; i++)
			outputs[i] = network[network.length - 1][i].getOutput();
		return outputs;
	}

	/**
	 * Forward pass: loads the inputs into layer 0, then computes each
	 * subsequent neuron's output as f(sum(prevOutput * weight) + bias).
	 * Extracted because caseTraining and classifyCase duplicated this code.
	 *
	 * @param inputs input-layer values; the caller has validated the length
	 * @throws Exception propagated from the transfer function
	 */
	private void forwardPropagate(double[] inputs) throws Exception {
		// Input-layer neurons simply pass their value through.
		for (int i = 0; i < inputs.length; i++) {
			network[0][i].setInput(inputs[i]);
			network[0][i].setOutput(inputs[i]);
		}
		// Each hidden/output neuron: weighted sum of the previous layer's
		// outputs plus the bias, pushed through the transfer function.
		for (int i = 1; i < network.length; i++) {
			for (int j = 0; j < network[i].length; j++) {
				double totalW = 0;
				for (int k = 0; k < network[i - 1].length; k++) {
					totalW += network[i - 1][k].getOutput()
							* network[i][j].getInputWeights()[k];
				}
				network[i][j].setInput(totalW + network[i][j].getBias());
				network[i][j].setOutput(function.transfer(network[i][j].getInput()));
			}
		}
	}
}
