package cn.genmer.test.security.machinelearning.deeplearning4j.mnist.V2;

import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.Updater;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.layers.variational.BernoulliReconstructionDistribution;
import org.deeplearning4j.nn.conf.layers.variational.VariationalAutoencoder;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.evaluation.classification.Evaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.lossfunctions.LossFunctions;

import java.io.File;

/**
 * MNIST model training. Uses the DL4J built-in dataset; the raw data is also
 * available online at: http://yann.lecun.com/exdb/mnist/
 */
public class MnistTrainV2 {
    /** Local directory where trained models are serialized. */
    public static final String BASE_PATH = "/Users/genmer/Documents/Codes/tensorFlowModel/mnist";

    /**
     * Trains the currently selected network (see {@link #dae()}) on MNIST,
     * evaluates it on the test split, and serializes the trained model to
     * {@link #BASE_PATH}. To train a different architecture, swap the factory
     * call below and adjust the output file name accordingly
     * (e.g. {@code cnn_mnist_model.zip}, {@code mlp_mnist_model.zip}).
     *
     * @throws Exception if the dataset cannot be loaded or the model file
     *                   cannot be written
     */
    public static void train() throws Exception {

        int batchSize = 32;
        int numEpochs = 3;
        int seed = 12345;
        // Train and test iterators over the DL4J built-in MNIST dataset.
        DataSetIterator trainIter = new MnistDataSetIterator(batchSize, true, seed);
        DataSetIterator testIter = new MnistDataSetIterator(batchSize, false, seed);


        // Build the network configuration; the factories return uninitialized networks.
        MultiLayerNetwork model = dae();
        System.out.println("模型配置信息：" + model.getLayerWiseConfigurations());
        model.init();

        // Log the score after every iteration.
        model.setListeners(new ScoreIterationListener(1));
        // Train for the configured number of epochs.
        for (int i = 0; i < numEpochs; i++) {
            model.fit(trainIter);
        }

        // Evaluate the model on the held-out test data.
        Evaluation eval = model.evaluate(testIter);
        System.out.println(eval.stats());

        // Save the trained model to local disk; the file name mirrors the
        // architecture selected above.
        File locationToSave = new File(BASE_PATH + "/dae_mnist_model.zip");
        boolean saveUpdater = true; // keep updater state so training can be resumed later
        ModelSerializer.writeModel(model, locationToSave, saveUpdater);
    }

    /**
     * Builds a LeNet-style CNN for 28x28 single-channel MNIST digits.
     * (Historical note from the original author: an earlier version used L2
     * regularization here, which hurt training.)
     *
     * @return an uninitialized {@link MultiLayerNetwork}; the caller must
     *         invoke {@code init()} before training
     */
    public static MultiLayerNetwork cnn() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .updater(new Adam(0.001))
                .list()
                // Conv 5x5, 1 -> 20 channels; identity activation, pooling follows.
                .layer(0, new ConvolutionLayer.Builder(5, 5)
                        .nIn(1)
                        .stride(1, 1)
                        .nOut(20)
                        .activation(Activation.IDENTITY)
                        .build())
                .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                        .kernelSize(2, 2)
                        .stride(2, 2)
                        .build())
                // Conv 5x5, 20 -> 50 channels.
                .layer(2, new ConvolutionLayer.Builder(5, 5)
                        .stride(1, 1)
                        .nOut(50)
                        .activation(Activation.IDENTITY)
                        .build())
                .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                        .kernelSize(2, 2)
                        .stride(2, 2)
                        .build())
                .layer(4, new DenseLayer.Builder().nOut(500).build())
                // 10-way softmax classifier over the digit classes.
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(10)
                        .activation(Activation.SOFTMAX)
                        .build())
                // Lets DL4J infer nIn for the inner layers from the flat 28x28x1 input.
                .setInputType(InputType.convolutionalFlat(28, 28, 1))
                .build();

        return new MultiLayerNetwork(conf);
    }

    /**
     * Builds a fully-connected network (784-1000-500-250-500-1000-10) with
     * ReLU hidden layers and a softmax output.
     *
     * @return an uninitialized {@link MultiLayerNetwork}; the caller must
     *         invoke {@code init()} before training
     */
    public static MultiLayerNetwork mlp() {
        NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                // IUpdater instance instead of the deprecated Updater.ADAM enum;
                // Adam's default learning rate (1e-3) matches the enum's default config.
                .updater(new Adam());

        NeuralNetConfiguration.ListBuilder listBuilder = builder.list()
                .layer(0, new DenseLayer.Builder().activation(Activation.RELU)
                        .nIn(28 * 28).nOut(1000).build())
                .layer(1, new DenseLayer.Builder().activation(Activation.RELU)
                        .nIn(1000).nOut(500).build())
                .layer(2, new DenseLayer.Builder().activation(Activation.RELU)
                        .nIn(500).nOut(250).build())
                .layer(3, new DenseLayer.Builder().activation(Activation.RELU)
                        .nIn(250).nOut(500).build())
                .layer(4, new DenseLayer.Builder().activation(Activation.RELU)
                        .nIn(500).nOut(1000).build())
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nIn(1000)
                        .nOut(10)
                        .activation(Activation.SOFTMAX)
                        .build());

        return new MultiLayerNetwork(listBuilder.build());
    }

    /**
     * Builds a stack of denoising autoencoder layers (corruption level 0.3,
     * KL-divergence reconstruction loss) topped with a 10-way softmax
     * classifier trained with MSE.
     *
     * @return an uninitialized {@link MultiLayerNetwork}; the caller must
     *         invoke {@code init()} before training
     */
    private static MultiLayerNetwork dae() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                // IUpdater instance instead of the deprecated Updater.ADAM enum;
                // Adam's default learning rate (1e-3) matches the enum's default config.
                .updater(new Adam())
                .list()
                .layer(0, new AutoEncoder.Builder().nIn(28 * 28).nOut(1000)
                        .activation(Activation.RELU)
                        .weightInit(WeightInit.XAVIER)
                        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
                        .corruptionLevel(0.3)
                        .build())
                .layer(1, new AutoEncoder.Builder().nIn(1000).nOut(500)
                        .activation(Activation.RELU)
                        .weightInit(WeightInit.XAVIER)
                        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
                        .corruptionLevel(0.3)
                        .build())
                .layer(2, new AutoEncoder.Builder().nIn(500).nOut(250)
                        .activation(Activation.RELU)
                        .weightInit(WeightInit.XAVIER)
                        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
                        .corruptionLevel(0.3)
                        .build())
                .layer(3, new AutoEncoder.Builder().nIn(250).nOut(500)
                        .activation(Activation.RELU)
                        .weightInit(WeightInit.XAVIER)
                        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
                        .corruptionLevel(0.3)
                        .build())
                .layer(4, new AutoEncoder.Builder().nIn(500).nOut(1000)
                        .activation(Activation.RELU)
                        .weightInit(WeightInit.XAVIER)
                        .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE)
                        .corruptionLevel(0.3)
                        .build())
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .nIn(1000)
                        .nOut(10)
                        .activation(Activation.SOFTMAX)
                        .build())
                .build();
        return new MultiLayerNetwork(conf);
    }

    /**
     * Builds a single-layer variational autoencoder with 1000-500 encoder /
     * 500-1000 decoder sizes, a 250-dimensional latent space, and a Bernoulli
     * reconstruction distribution (suitable for the binarized MNIST pixels).
     *
     * @return an uninitialized {@link MultiLayerNetwork}; the caller must
     *         invoke {@code init()} before training (matching cnn/mlp/dae)
     */
    private static MultiLayerNetwork vae() {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                // IUpdater instance instead of the deprecated Updater.ADAM enum;
                // Adam's default learning rate (1e-3) matches the enum's default config.
                .updater(new Adam())
                .weightInit(WeightInit.XAVIER)
                .list()
                .layer(0, new VariationalAutoencoder.Builder()
                        .activation(Activation.LEAKYRELU)
                        .encoderLayerSizes(new int[]{1000, 500})
                        .decoderLayerSizes(new int[]{500, 1000})
                        .pzxActivationFunction(Activation.IDENTITY)
                        .reconstructionDistribution(new BernoulliReconstructionDistribution(Activation.SIGMOID))
                        .nIn(28 * 28)
                        .nOut(250)
                        .build())
                .build();

        // init() is deliberately NOT called here: train() initializes the
        // network, and the other factories (cnn/mlp/dae) also return
        // uninitialized networks.
        return new MultiLayerNetwork(conf);
    }

    public static void main(String[] args) throws Exception {
        train();
    }
}