package com.fengwk.deeplearning.dl4j;

import java.util.Random;

import org.deeplearning4j.api.storage.StatsStorage;
import org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.ui.api.UIServer;
import org.deeplearning4j.ui.stats.StatsListener;
import org.deeplearning4j.ui.storage.InMemoryStatsStorage;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;

/**
 * Linear regression example: fits y = 0.5x + 0.1 with a small feed-forward
 * network trained by SGD, visualized through the DL4J training UI.
 * 
 * @author fengwk
 *
 */
public class LinearRegression {

    // Random seed, used so results are reproducible.
    private static final int seed = 15;
    // Number of epochs (full passes over the training data).
    private static final int nEpochs = 20;
    // Total number of generated sample points.
    private static final int nSamples = 1000;
    // Batch size: i.e., each epoch has nSamples/batchSize parameter updates.
    private static final int batchSize = 100;
    // Learning rate for the SGD updater.
    private static final double learningRate = 0.01;

    /**
     * Generates {@code nSamples} training points on the line y = 0.5x + 0.1
     * with x drawn uniformly from [0, 1).
     *
     * @param batchSize number of examples per mini-batch
     * @param rand      seeded source of randomness for reproducibility
     * @return iterator over the synthetic training set
     */
    private static DataSetIterator getTrainingData(int batchSize, Random rand) {
        double[] xs = new double[nSamples];
        double[] ys = new double[nSamples];

        for (int i = 0; i < nSamples; i++) {
            xs[i] = rand.nextDouble();
            ys[i] = xs[i] * 0.5 + 0.1; // ground truth: y = 0.5x + 0.1
        }

        // BUG FIX: the shape must match the number of samples; it was
        // hard-coded as {100, 1} while the arrays held 1000 values.
        INDArray X = Nd4j.create(xs, new int[] {nSamples, 1});
        INDArray y = Nd4j.create(ys, new int[] {nSamples, 1});

        DataSet ds = new DataSet(X, y);

        return new ListDataSetIterator<>(ds.asList(), batchSize);
    }

    public static void main(String[] args) {

        DataSetIterator trainDataSet = getTrainingData(batchSize, new Random(seed));

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.XAVIER)
                .updater(new Sgd(learningRate))
                .list()
                .layer(0, new DenseLayer.Builder()
                        .activation(Activation.IDENTITY)
                        .nIn(1)
                        .nOut(5)
                        .build())
                // BUG FIX: this layer previously reused index 0, silently
                // replacing the dense layer above instead of stacking after it,
                // and declared nIn(1) instead of matching the dense layer's nOut(5).
                .layer(1, new OutputLayer.Builder(LossFunction.MSE)
                        .activation(Activation.IDENTITY)
                        .nIn(5)
                        .nOut(1)
                        .build())
                .pretrain(false)
                .backprop(true)
                .build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();

        System.out.println(model.summary());

        // Initialize the user interface backend (single shared instance).
        UIServer uiServer = UIServer.getInstance();

        // Configure where the network information (gradients, activations,
        // score vs. time etc.) is to be stored, then attach the StatsListener
        // to collect this information from the network as it trains.
        StatsStorage statsStorage = new InMemoryStatsStorage(); // Alternative: new FileStatsStorage(File)

        // Attach the StatsStorage instance to the UI so its contents can be visualized.
        uiServer.attach(statsStorage);

        model.setListeners(
                new StatsListener(statsStorage, 1),
                new ScoreIterationListener(1));

        // BUG FIX: training previously ran in a while(true) loop that never
        // terminated, leaving the evaluation code below unreachable.
        for (int epoch = 0; epoch < nEpochs; epoch++) {
            trainDataSet.reset();
            model.fit(trainDataSet);
        }

        // Sanity check: predictions should approximate y = 0.5x + 0.1.
        INDArray input = Nd4j.create(new double[] {0.2, 0.8}, new int[] {2, 1});
        INDArray out = model.output(input, false);
        System.out.println(out);
    }

}
