package com.fengwk.deeplearning.study;

import java.io.Serializable;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

import com.fengwk.support.util.IoUtils;

/**
 * A two-layer (single hidden layer) neural network trained with batch
 * gradient descent. The hidden layer uses ReLU activation, the output layer
 * uses sigmoid (so outputs lie in (0, 1)), and training minimizes the
 * binary cross-entropy cost until the change between consecutive iterations
 * falls below a configured threshold.
 *
 * <p>Not thread-safe except for {@link #training()}, which is synchronized.
 *
 * @author fengwk
 */
public class Study04_TwoLayerNeuralNetwork implements Serializable {
	
	private static final long serialVersionUID = 1128934996126987087L;
	
	private INDArray X;// training inputs, n*m (n features, m examples; one example per column)
	private INDArray y;// training labels, 1*m, expected in {0, 1} for cross-entropy
	
	private INDArray W1;// hidden-layer weights, j*n matrix
	private INDArray b1;// hidden-layer biases, j*1 column vector
	private INDArray Z1;// hidden-layer pre-activation, j*m
	private INDArray A1;// hidden-layer activation (ReLU), j*m
	
	private INDArray W2;// output-layer weights, 1*j row vector
	private INDArray b2;// output-layer bias, 1*1
	private INDArray Z2;// output-layer pre-activation, 1*m
	private INDArray A2;// output-layer activation (sigmoid), 1*m
	
	private int j;// number of hidden-layer nodes
	
	private double alpha;// gradient-descent learning rate
	private double threshold;// convergence threshold on the change in cost
	
	private int n;// number of input features
	private int m;// number of training examples
	
	private double J;// current cost (binary cross-entropy)
	
	/**
	 * Creates an untrained network.
	 *
	 * @param X training inputs, n*m (one example per column)
	 * @param y training labels, 1*m
	 * @param j number of hidden-layer nodes
	 * @param alpha learning rate
	 * @param threshold convergence threshold on the cost change
	 */
	public Study04_TwoLayerNeuralNetwork(INDArray X, INDArray y, int j, double alpha, double threshold) {
		this.X = X;
		this.y = y;
		this.j = j;
		this.alpha = alpha;
		this.threshold = threshold;
		this.n = X.size(0);
		this.m = X.size(1);
	}
	
	/**
	 * Serializes this network (including any trained parameters) to the
	 * given file path.
	 *
	 * @param filePath destination file path
	 */
	public void serialize(String filePath) {
		byte[] bytes = IoUtils.objectToBytes(this);
		IoUtils.write(bytes, filePath);
	}
	
	/**
	 * Runs a prediction. The network must be trained first via
	 * {@link #training()}.
	 *
	 * @param X input with n rows (one example per column)
	 * @return the sigmoid output for the first example, in (0, 1)
	 */
	public double run(INDArray X) {
		return forward0(X).getDouble(0);
	}
	
	/**
	 * Trains the network on the stored training set with batch gradient
	 * descent, stopping when the cost change between two consecutive
	 * iterations drops below {@code threshold}.
	 */
	public synchronized void training() {
		// Small random initial weights break symmetry between hidden units;
		// biases start at zero.
		W1 = Nd4j.rand(j, n).mul(0.01);
		b1 = Nd4j.zeros(j, 1);
		W2 = Nd4j.rand(1, j).mul(0.01);
		b2 = Nd4j.zeros(1, 1);
		// null until the first cost has been computed, so the convergence
		// check never compares against the uninitialized cost. (The previous
		// code assigned J0 = J before the first forward pass, which boxed the
		// default 0.0 and defeated the null guard.)
		Double previousCost = null;
		while (true) {
			forward();
			back();
			if (previousCost != null && Math.abs(J - previousCost) < threshold)
				return;
			previousCost = J;
		}
	}
	
	// Forward propagation over the full training set; refreshes the cached
	// activations and the current cost J.
	private void forward() {
		A2 = forward0(X);
		// Binary cross-entropy cost: mean(-(y*log(a) + (1-y)*log(1-a)))
		J = y.mul(Transforms.log(A2)).add(y.rsub(1).mul(Transforms.log(A2.rsub(1)))).mul(-1).mean(1).getDouble(0);
	}
	
	// Forward pass for an arbitrary batch. Uses the batch's own column count
	// (not the training-set m) when broadcasting b1, so run() also works for
	// inputs with a different number of examples than the training set.
	private INDArray forward0(INDArray X) {
		int cols = X.size(1);// number of examples in this batch
		Z1 = W1.mmul(X).add(b1.broadcast(j, cols));// j*cols
		A1 = Transforms.relu(Z1);// ReLU as the hidden-layer activation
		Z2 = W2.mmul(A1).add(b2.getDouble(0));// 1*cols; b2 is 1*1, added as a scalar
		A2 = Transforms.sigmoid(Z2);// sigmoid output layer, range (0, 1)
		return A2;
	}
	
	// Backpropagation: computes the gradients and applies one gradient-descent
	// step to W1, b1, W2, b2.
	private void back() {
		INDArray dZ2 = A2.sub(y);// 1*m; dJ/dZ2 for sigmoid + cross-entropy
		INDArray dW2 = dZ2.mmul(A1.transpose()).div(m);// 1*j
		INDArray db2 = dZ2.mean(1);// 1*1
		INDArray dZ1 = W2.transpose().mmul(dZ2).mul(reluDerivative(Z1));// j*m
		INDArray dW1 = dZ1.mmul(X.transpose()).div(m);// j*n
		INDArray db1 = dZ1.mean(1);// j*1
		// Gradient-descent update. Must use the in-place subi(): the
		// non-mutating sub() returns a new array and discards it, so the
		// parameters would never change and training would never converge.
		W2.subi(dW2.mul(alpha));
		b2.subi(db2.mul(alpha));
		W1.subi(dW1.mul(alpha));
		b1.subi(db1.mul(alpha));
	}

	// Element-wise ReLU derivative: 1 where the input is positive, 0
	// otherwise. Returns a fresh mask instead of mutating the argument.
	// (The previous version only zeroed negatives and left positive entries
	// at their raw values, scaling the hidden gradient by Z1 instead of
	// masking it — and it destructively modified Z1 in the process.)
	private INDArray reluDerivative(INDArray nd) {
		INDArray mask = Nd4j.zeros(nd.rows(), nd.columns());
		for (int i = 0; i < nd.length(); i++) {
			if (nd.getDouble(i) > 0)
				mask.putScalar(i, 1.0);
		}
		return mask;
	}
	
}
