package com.fengwk.deeplearning.study;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

/**
 * 逻辑回归向量化实现 (Vectorized logistic regression implementation)
 * 
 * @author fengwk
 *
 */
public class Study03_Logistic extends Study02_Logistic {

	private static final long serialVersionUID = -8829343555322644405L;
	
	// Feature matrix with one training example per column: X = [x1 x2 ... xm], shape n*m.
	private INDArray X;
	// Label row vector, shape 1*m. Assumed to hold 0/1 targets — TODO confirm against caller.
	private INDArray y;
	
	/**
	 * Builds a vectorized logistic regression model where Z = W^T * X + b.
	 * 
	 * @param X feature matrix of shape n*m (n features, m training examples)
	 * @param y label row vector of shape 1*m
	 * @param alpha learning rate for gradient descent
	 * @param threshold convergence threshold (semantics defined by the superclass)
	 */
	public Study03_Logistic(INDArray X, INDArray y, double alpha, double threshold) {
		// Superclass sample list is unused in the vectorized variant, hence null.
		super(null, alpha, threshold);
		this.X = X;
		this.y = y;
		this.n = X.size(0);
		this.m = X.size(1);
	}
	
	/**
	 * Delegates serialization to the superclass implementation.
	 * Kept as an explicit override for documentation purposes only.
	 */
	@Override
	public void serialize(String filePath) {
		super.serialize(filePath);
	}
	
	/**
	 * Predicts the positive-class probability for one example given as a raw array.
	 * 
	 * @param xs feature values of a single example, length n
	 * @return sigmoid(w^T * x + b), in (0, 1)
	 */
	@Override
	public double run(double[] xs) {
		// Wrap the raw features as an n*1 column vector before predicting.
		return run(Nd4j.create(xs, new int[] {n, 1}));
	}
	
	/**
	 * Predicts the positive-class probability for one example given as an n*1 column vector.
	 * 
	 * @param x feature column vector of shape n*1
	 * @return sigmoid(w^T * x + b), in (0, 1)
	 */
	public double run(INDArray x) {
		// BUG FIX: the bias term b was missing here. Training (gradientDescent) learns
		// Z = w^T * X + b and updates b, so prediction must include it as well.
		return Transforms.sigmoid(w.transpose().mmul(x).add(b)).getDouble(0);
	}
	
	@Override
	protected void init() {
		// Zero-initialized weights are fine for logistic regression (convex objective).
		w = Nd4j.zeros(n, 1);
		b = 0;
	}
	
	/**
	 * One vectorized gradient-descent step over the whole training set:
	 * forward pass to compute the mean cross-entropy cost J, then backward
	 * pass to update w and b with learning rate alpha.
	 */
	@Override
	protected void gradientDescent() {
		// Forward propagation: Z = w^T X + b, shapes (1*n)(n*m) -> 1*m.
		INDArray Z = w.transpose().mmul(X).add(b);
		INDArray A = Transforms.sigmoid(Z);
		// Cross-entropy loss per example: -(y*log(A) + (1-y)*log(1-A)).
		INDArray loss1 = y.mul(Transforms.log(A));
		INDArray loss2 = y.rsub(1).mul(Transforms.log(A.rsub(1)));
		INDArray loss = loss1.add(loss2).mul(-1);
		// Cost is the mean loss over all m examples (direct assignment; the old
		// "J = 0; J += ..." accumulated nothing extra).
		J = Nd4j.mean(loss).getDouble(0);
		// Backward propagation: dZ = A - y; dW = X dZ^T / m (n*1); db = mean(dZ).
		INDArray dz = A.sub(y);
		INDArray dw = X.mmul(dz.transpose()).div(m);
		double db = Nd4j.mean(dz).getDouble(0);
		w = w.sub(dw.mul(alpha));
		b -= alpha * db;
	}
	
}
