package org.wisdom.dl4j.application.service;

import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.evaluation.regression.RegressionEvaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;
import org.nd4j.linalg.dataset.api.preprocessor.serializer.NormalizerSerializer;
import org.nd4j.linalg.learning.config.Nesterovs;
import org.nd4j.linalg.lossfunctions.LossFunctions;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

/**
 * Regression model training example (adapted from the "Linear" Data
 * Classification Example): trains a single-output MSE regression network on
 * CSV data, tracks the best evaluation metrics per epoch, and serializes the
 * fitted normalizer and best model to disk.
 * <p>
 * Originally based on the data from Jason Baldridge:
 * https://github.com/jasonbaldridge/try-tf/tree/master/simdata
 *
 * @author Josh Patterson
 * @author Alex Black (added plots)
 */
//@Slf4j
public class DqModelBuild116 {

    /**
     * Trains a single-layer MSE regression network on the J00116-7 CSV data,
     * standardizing features/labels with statistics fitted on the training set,
     * tracking the best (lowest) MSE/MAE/RMSE/RSE seen across epochs, and
     * serializing the normalizer and the best-RMSE model snapshot to disk.
     *
     * @param args optional overrides (kept backward compatible when absent):
     *             args[0] = normalizer output path, args[1] = model output path
     * @throws Exception if data loading, training, or serialization fails
     */
    public static void main(String[] args) throws Exception {
        // Random seed for reproducible weight initialization
        int seed = 123;
        // Learning rate for the Nesterov updater
        double learningRate = 0.001;
        // Mini-batch size
        int batchSize = 90;
        // Number of training epochs
        int nEpochs = 3000;
        // Number of input features (derived from the training data below)
        int numInputs;
        // Single regression output
        int numOutputs = 1;

        // Output locations; overridable via command line so the run is not
        // tied to one machine's filesystem layout.
        String normalizerPath = args.length > 0 ? args[0]
                : "/Users/zhangzhiyan/Desktop/temp/wfd-normalizer.zip";
        String modelPath = args.length > 1 ? args[1]
                : "/Users/zhangzhiyan/Desktop/temp/wfd-model.zip";

        final String filenameTrain = new ClassPathResource("/classification/J00116-7-train.csv").getFile().getPath();
        final String filenameTest = new ClassPathResource("/classification/J00116-7-eval.csv").getFile().getPath();

        // Training data: label is column 0, regression with numOutputs=1
        RecordReader rr = new CSVRecordReader();
        rr.initialize(new FileSplit(new File(filenameTrain)));
        DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, 0, 1);

        numInputs = trainIter.inputColumns();

        // Fit a standardizer (zero mean, unit variance) on the training data only,
        // persist it for inference-time reuse, then apply it to the train iterator.
        NormalizerStandardize preProcessor = new NormalizerStandardize();
        preProcessor.fit(trainIter);
        NormalizerSerializer.getDefault().write(preProcessor, new File(normalizerPath));
        trainIter.setPreProcessor(preProcessor);

        // Test/evaluation data: must use the SAME normalizer statistics as training
        RecordReader rrTest = new CSVRecordReader();
        rrTest.initialize(new FileSplit(new File(filenameTest)));
        DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, 0, 1);
        testIter.setPreProcessor(preProcessor);

        // Minimal network: a single output layer doing linear regression with
        // MSE loss, RELU activation and Nesterov momentum.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .list()
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .nIn(numInputs)
                        .nOut(numOutputs)
                        .weightInit(WeightInit.XAVIER)
                        .activation(Activation.RELU)
                        .updater(new Nesterovs(learningRate, 0.9)) // momentum = 0.9
                        .build())
                .build();

        MultiLayerNetwork tempModel = new MultiLayerNetwork(conf);
        tempModel.init();
        tempModel.setListeners(new ScoreIterationListener(1000)); // print score every 1000 parameter updates

        // Best-so-far snapshot. FIX: the original code assigned the live
        // reference (model = tempModel), so continued training mutated the
        // "best" model too and the final save was always the last-epoch model
        // rather than the best-RMSE one. clone() takes a real snapshot.
        MultiLayerNetwork model = tempModel.clone();
        double mse = 100;
        double mae = 100;
        double rmse = 100;
        double rse = 100;
        // Best metric values and the (1-based) epoch at which each was achieved
        Map<String, Double> m1 = new HashMap<>();
        Map<String, Integer> m2 = new HashMap<>();
        for (int n = 0; n < nEpochs; n++) {
            tempModel.fit(trainIter);
            testIter.reset();
            RegressionEvaluation eval2 = tempModel.evaluateRegression(testIter);
            double mse1 = eval2.meanSquaredError(0);
            double mae1 = eval2.meanAbsoluteError(0);
            double rmse1 = eval2.rootMeanSquaredError(0);
            double rse1 = eval2.relativeSquaredError(0);
            if (mse1 < mse) {
                m1.put("mse", mse1);
                m2.put("mse", (n + 1));
                mse = mse1;
            }
            if (mae1 < mae) {
                m1.put("mae", mae1);
                m2.put("mae", (n + 1));
                mae = mae1;
            }
            if (rmse1 < rmse) {
                m1.put("rmse", rmse1);
                m2.put("rmse", (n + 1));
                rmse = rmse1;
                model = tempModel.clone(); // snapshot, not a live alias
            }
            if (rse1 < rse) {
                m1.put("rse", rse1);
                m2.put("rse", (n + 1));
                rse = rse1;
            }
            System.out.print("训练第" + (n + 1) + "次[");
            System.out.print("mse:" + mse1);
            System.out.print(" mae:" + mae1);
            System.out.print(" rmse:" + rmse1);
            System.out.print(" rse:" + rse1);
            System.out.print(" PC:" + eval2.pearsonCorrelation(0));
            System.out.print(" r^2:" + eval2.rSquared(0));
            System.out.println("]");
        }
        System.out.println("mse:" + m1.get("mse") + "," + m2.get("mse"));
        System.out.println("mae:" + m1.get("mae") + "," + m2.get("mae"));
        System.out.println("rmse:" + m1.get("rmse") + "," + m2.get("rmse"));
        System.out.println("rse:" + m1.get("rse") + "," + m2.get("rse"));

        // Persist the best-RMSE model (saveUpdater=true keeps optimizer state)
        ModelSerializer.writeModel(model, new File(modelPath), true);
        System.out.println("Evaluate model....");

        testIter.reset();
        INDArray result = model.output(testIter);
        // Materialize predictions once instead of calling toFloatVector() twice
        // and iterating over boxed Float values.
        float[] predictions = result.toFloatVector();
        System.out.println("结果" + predictions.length);
        for (float value : predictions) {
            // NOTE(review): 99 appears to rescale predictions back to the
            // original label range — confirm against the data pipeline.
            System.out.println(value * 99);
        }
    }
}
