package com.fengwk.deeplearning.study;

import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

/**
 * Logistic-regression training, initial vectorization: the per-feature weights
 * are held in a single ND4J vector {@code w}, so the forward pass
 * z = w&middot;x + b is one matrix product instead of a per-feature loop.
 *
 * @author fengwk
 */
public class Study02_Logistic extends Study01_Logistic {

	private static final long serialVersionUID = -139984977215387885L;

	// Training set: each key is a column feature vector (n x 1), each value its label.
	// NOTE(review): INDArray is mutable, so using it as a HashMap key is fragile —
	// sample vectors must not be modified after insertion. Interface kept for
	// backward compatibility with existing callers.
	private Map<INDArray, Double> sample;

	// Weight vector, one entry per feature; updated in place by gradient descent.
	protected INDArray w;

	/**
	 * Builds the trainer.
	 *
	 * @param sample training samples; each INDArray key is a column feature vector
	 * @param alpha learning rate
	 * @param threshold convergence threshold on the cost J
	 */
	public Study02_Logistic(Map<INDArray, Double> sample, double alpha, double threshold) {
		super(null, alpha, threshold);
		this.sample = sample;
		this.alpha = alpha;
		this.threshold = threshold;
		// n = feature count (rows of any sample vector), m = number of samples
		this.n = sample.entrySet().iterator().next().getKey().size(0);
		this.m = sample.size();
	}
	
	@Override
	public void serialize(String filePath) {
		super.serialize(filePath);
	}
	
	@Override
	public double run(double[] xs) {
		return run(Nd4j.create(xs));
	}
	
	/**
	 * Predicts the probability for one column feature vector.
	 *
	 * @param x column feature vector (n x 1)
	 * @return sigmoid(w&middot;x + b)
	 */
	public double run(INDArray x) {
		// FIX: include the bias term b; it is trained in gradientDescent()
		// (see db below) but was previously ignored during prediction.
		return sigmoid(w.mmul(x).getDouble(0) + b);
	}
	
	@Override
	protected void init() {
		w = Nd4j.zeros(n);
		b = 0;
	}
	
	/**
	 * One batch gradient-descent step over the whole sample set: accumulates
	 * the mean cost J and the gradients dw/db, then updates w and b.
	 */
	@Override
	protected void gradientDescent() {
		J = 0;
		INDArray dw = Nd4j.zeros(n);
		double db = 0;
		
		Iterator<Entry<INDArray, Double>> it = sample.entrySet().iterator();
		while (it.hasNext()) {
			Entry<INDArray, Double> entry = it.next();
			INDArray x = entry.getKey();
			double y = entry.getValue();
			// Forward pass: z = w·x + b.
			// FIX: the bias was previously omitted here even though its
			// gradient db is accumulated below, leaving b a dead variable.
			double z = w.mmul(x).getDouble(0) + b;
			double a = sigmoid(z);
			double loss = loss(a, y);
			J += loss;
			// Backward pass: dz = dL/dz = a - y for the cross-entropy loss.
			double dz = a - y;
			dw = dw.add(x.mul(dz));
			db += dz;
		}
		J /= m;
		// FIX: INDArray.sub() is non-destructive and returns a new array; the
		// original discarded the result, so w was never updated and training
		// could not converge. subi() performs the subtraction in place.
		w.subi(dw.div(m).mul(alpha));
		b -= alpha * db / m;
	}
	
}
