package com.fengwk.deeplearning.core;

import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;

import com.fengwk.support.exception.BaseException;
import com.fengwk.support.util.ArrayUtils;
import com.fengwk.support.util.IoUtils;
import com.fengwk.support.util.LoggerUtils;
import com.fengwk.support.util.LoggerUtils.Logger;

/**
 * A simple fully-connected feedforward neural network for binary classification,
 * built on ND4J. Supports L2 (Frobenius-norm) regularization, inverted dropout,
 * input batch normalization, and per-key loss-curve recording for plotting.
 *
 * <p>Thread-safety: training and curve-key updates are serialized on this
 * instance's monitor; {@link #run(INDArray)} performs no locking.
 */
public class NeuralNetwork implements Serializable {
	
	private static final long serialVersionUID = 7197624512769275728L;
	
	private static final Logger LOG = LoggerUtils.getLogger(NeuralNetwork.class);
	
	private static final String DEFAULT_CURVE_KEY = "training";
	
	/** Guards against division by zero for zero-variance features in batchNormalization. */
	private static final float EPSILON = 1e-8f;
	
	// NOTE: field names (including the 'lambada' typo and 'JCurveMap' capitalization)
	// are kept unchanged to preserve Java-serialization compatibility with the
	// fixed serialVersionUID above.
	private Layer[] hiddenLayers;// hidden layers, may be null/empty
	private Layer outputLayer;// output layer; must have exactly one unit
	private float alpha;// learning rate
	private float lambada;// L2 (Frobenius norm) regularization strength; 0 disables
	private float keepProp;// dropout keep probability, in (0, 1]
	
	private volatile String currentCurveKey;// key of the loss curve currently being recorded
	private Map<String, List<Float>> JCurveMap;// loss history per curve key, for plotting
	
	/**
	 * Creates a network from the given layers and hyperparameters.
	 *
	 * @param hiddenLayers hidden layers in forward order; may be null/empty
	 * @param outputLayer single-unit output layer; must not be null and must be
	 *        a distinct object from every hidden layer
	 * @param alpha learning rate
	 * @param lambada L2 regularization strength (0 disables regularization)
	 * @param keepProp dropout keep probability, must be in (0, 1]
	 * @throws BaseException if any precondition above is violated
	 */
	public NeuralNetwork(Layer[] hiddenLayers, Layer outputLayer, float alpha, float lambada, float keepProp) {
		if (outputLayer == null)
			throw new BaseException("outputLayer == null");
		if (outputLayer.unitCount() != 1)
			throw new BaseException("outputLayer.unitCount() != 1");
		// BUG FIX: the original assigned (rather than OR-accumulated) the result of
		// each identity comparison into hasSameLayer, so only the very last pair
		// checked had any effect and duplicate layers could slip through.
		// Throw as soon as any duplicate reference is found instead.
		if (ArrayUtils.isNotBlank(hiddenLayers)) {
			for (int a = 0; a < hiddenLayers.length; a ++) {
				if (hiddenLayers[a] == outputLayer)
					throw new BaseException("can't use the same layer");
				for (int b = a + 1; b < hiddenLayers.length; b ++) {
					if (hiddenLayers[a] == hiddenLayers[b])
						throw new BaseException("can't use the same layer");
				}
			}
		}
		if (keepProp <= 0 || keepProp > 1)
			throw new BaseException("keepProp <=0 || keepProp > 1");
		this.hiddenLayers = hiddenLayers;
		this.outputLayer = outputLayer;
		this.alpha = alpha;
		this.lambada = lambada;
		this.keepProp = keepProp;
	}

	/**
	 * Serializes this network (Java native serialization) to the given path.
	 * 
	 * @param path destination file path
	 */
	public void serialize(String path) {
		IoUtils.write(IoUtils.objectToBytes(this), path);
	}

	/**
	 * Runs a forward pass for prediction. Dropout is disabled (keepProp = 1).
	 * 
	 * @param x feature column vector
	 * @return the single output activation
	 */
	public float run(INDArray x) {
		return foreprop(x, 1).getFloat(0);
	}

	/**
	 * Trains the network with full-batch gradient descent.
	 * 
	 * @param samples sample set providing X (features, one column per sample) and y (labels)
	 * @param iter number of iterations over the full batch
	 */
	public void training(Samples samples, int iter) {
		INDArray X = samples.getX();
		INDArray y = samples.getY();
		// Normalize inputs once up front; every iteration reuses the normalized X.
		X = batchNormalization(X);
		for (int i = 0; i < iter; i ++) {
		    LOG.debug("开始" + (i + 1) + "次迭代------------------------");
			training0(X, y);
		}
	}
	
	/**
	 * One training step: forward pass, cost evaluation/recording, backward pass.
	 * Synchronized so concurrent callers cannot interleave forward/backward state.
	 * 
	 * @param X normalized feature matrix, n*m
	 * @param y label row vector, 1*m
	 */
	private synchronized void training0(INDArray X, INDArray y) {
		INDArray A_last = foreprop(X, keepProp);
		float J = cost(A_last, y);
		LOG.debug("损失值:" + J);
		setJCurveMap(J);
		backprop(dA_last(A_last, y));
	}
	
	/**
	 * Appends the current loss value to the curve identified by currentCurveKey,
	 * lazily creating the map and curve list, then refreshes the UI.
	 * 
	 * @param J loss value for the current iteration
	 */
	private void setJCurveMap(float J) {
		if (JCurveMap == null)
			JCurveMap = new ConcurrentHashMap<>();
		if (currentCurveKey == null)
			currentCurveKey = DEFAULT_CURVE_KEY;
		// computeIfAbsent replaces the original get/null-check/put sequence, which
		// raced on the concurrent map (two threads could each install a fresh list).
		JCurveMap.computeIfAbsent(currentCurveKey, k -> new Vector<>()).add(J);
		flushUi();
	}
	
	/**
	 * Forward propagation through all hidden layers, then the output layer.
	 * 
	 * @param A input activations (feature matrix for the first layer)
	 * @param keepProp dropout keep probability (1 disables dropout)
	 * @return output-layer activations, 1*m
	 */
	private INDArray foreprop(INDArray A, float keepProp) {
		if (ArrayUtils.isNotBlank(hiddenLayers)) {
			for (Layer hiddenLayer: hiddenLayers)
				A = hiddenLayer.foreprop(A, keepProp);
		}
		return outputLayer.foreprop(A, keepProp);
	}
	
	/**
	 * Backward propagation: output layer first, then hidden layers in reverse.
	 * Each layer updates its own parameters using alpha and lambada.
	 * 
	 * @param dA gradient of the cost w.r.t. the output activations
	 */
	private void backprop(INDArray dA) {
		dA = outputLayer.backprop(dA, alpha, lambada);
		if (ArrayUtils.isNotBlank(hiddenLayers)) {
			for (int i = hiddenLayers.length - 1; i >= 0; i --)
				dA = hiddenLayers[i].backprop(dA, alpha, lambada);
		}
	}
	
	/**
	 * Cross-entropy cost plus L2 regularization term:
	 * J = mean(-(y*log(a) + (1-y)*log(1-a))) + frobeniusNorm.
	 * 
	 * <p>NOTE(review): activations of exactly 0 or 1 yield log(0) = -Infinity;
	 * callers rely on the sigmoid output staying strictly in (0, 1).
	 * 
	 * @param A_last output activations, 1*m
	 * @param y labels, 1*m
	 * @return scalar cost
	 */
	private float cost(INDArray A_last, INDArray y) {
		INDArray loss1 = y.mul(Transforms.log(A_last));
		INDArray loss2 = y.rsub(1).mul(Transforms.log(A_last.rsub(1)));
		return - loss1.add(loss2).mean(1).getFloat(0) + frobeniusNorm(y.size(1));
	}
	
	/**
	 * Normalizes each feature (row) of X to zero mean and unit standard deviation.
	 * 
	 * <p>BUG FIX: the original left a debug System.out.println and dead code in
	 * place, recomputed the variance and discarded it, and divided by the
	 * variance rather than the standard deviation. EPSILON guards against
	 * zero-variance features.
	 * 
	 * @param X feature matrix, n*m (one column per sample)
	 * @return normalized copy of X
	 */
	private static INDArray batchNormalization(INDArray X) {
		X = X.sub(X.mean(1).broadcast(X.shape()));// subtract per-feature mean
		INDArray variance = Transforms.pow(X, 2).mean(1);
		INDArray std = Transforms.sqrt(variance.add(EPSILON));
		return X.div(std.broadcast(std.size(0), X.size(1)));// divide by standard deviation
	}
	
	/**
	 * Ad-hoc demo of batchNormalization on a small fixed matrix.
	 */
	public static void main(String[] args) {
		INDArray x = Nd4j.create(new float[][] {
			{-1, 2, 2},
			{1, 2, 1}
		});
		
		INDArray z = batchNormalization(x);
		System.out.println(z);
	}
	
	/**
	 * L2 (Frobenius norm) regularization term summed over all layer weight
	 * matrices. Returns 0 immediately when regularization is disabled.
	 * 
	 * @param m number of samples
	 * @return regularization contribution to the cost
	 */
	private float frobeniusNorm(int m) {
		if (lambada == 0)
			return 0f;
		float regularization = 0;
		if (ArrayUtils.isNotBlank(hiddenLayers)) {
			for (Layer hiddenLayer: hiddenLayers) 
				regularization += frobeniusNorm0(hiddenLayer.W(), m);
		}
		regularization += frobeniusNorm0(outputLayer.W(), m);
		return regularization;
	}
	
	/**
	 * Regularization contribution of one weight matrix: lambada/(2m) * sum(W^2).
	 * 
	 * @param W a layer's weight matrix
	 * @param m number of samples
	 */
	private float frobeniusNorm0(INDArray W, int m) {
		return Transforms.pow(W, 2).sum(0, 1).getFloat(0) * lambada / 2 / m;
	}
	
	/**
	 * Gradient of the cross-entropy cost w.r.t. the output activations:
	 * dJ/dA = -y/a + (1-y)/(1-a).
	 * 
	 * @param A_last output activations, 1*m
	 * @param y labels, 1*m
	 * @return gradient, 1*m
	 */
	private INDArray dA_last(INDArray A_last, INDArray y) {
		INDArray dA_last1 = y.div(A_last);
		INDArray dA_last2 = y.rsub(1).div(A_last.rsub(1));
		return dA_last2.sub(dA_last1);
	}

	/**
	 * Switches loss recording to a new curve key; subsequent losses are
	 * appended under this key.
	 * 
	 * @param curveKey new curve key (null falls back to the default key)
	 */
	public synchronized void setCurveKey(String curveKey) {
		this.currentCurveKey = curveKey;
	}
	
	/**
	 * Draws the recorded loss curves. UI integration not yet implemented.
	 */
	public synchronized void drawCurve() {
		// ui to do
	}
	
	/** Refreshes the UI after a new loss value is recorded. Not yet implemented. */
	private void flushUi() {
		// flush ui
	}
	
}
