package com.demo.LSTM;

import org.deeplearning4j.datasets.iterator.impl.ListDataSetIterator;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.conf.*;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.evaluation.regression.RegressionEvaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Adam;
import org.nd4j.linalg.lossfunctions.LossFunctions;

import java.util.ArrayList;
import java.util.List;

/**
 * Minimal DL4J demo: trains a single-layer LSTM regression network on the
 * toy mapping x -> x + 0.1, then predicts on two unseen inputs.
 *
 * <p>DL4J recurrent-network data layout is {@code [miniBatch, featureSize,
 * timeSeriesLength]} for both features and labels; this class constructs all
 * tensors in that layout.
 */
public class LSTMLoadPrediction2Demo {
    public static void main(String[] args) {
        int numInputs = 1;       // features per time step (nIn of the LSTM)
        int numOutputs = 1;      // regression targets per time step
        int numHiddenUnits = 10; // LSTM hidden state size
        int numEpochs = 100;

        // Toy training set: each sample is a single time step mapping x -> x + 0.1.
        double[] inputArray = {0.1, 0.2, 0.3, 0.4};
        double[] outputArray = {0.2, 0.3, 0.4, 0.5};

        List<DataSet> dataList = new ArrayList<>(inputArray.length);
        for (int i = 0; i < inputArray.length; i++) {
            // Shape [1, numInputs, 1]: one example, one feature, one time step.
            INDArray features = Nd4j.create(new double[]{inputArray[i]}, new int[]{1, numInputs, 1});
            INDArray labels = Nd4j.create(new double[]{outputArray[i]}, new int[]{1, numOutputs, 1});
            dataList.add(new DataSet(features, labels));
        }

        ListDataSetIterator<DataSet> iterator = new ListDataSetIterator<>(dataList, 1);

        // Network configuration: LSTM (tanh) -> identity RNN output layer with MSE loss.
        MultiLayerConfiguration configuration = new NeuralNetConfiguration.Builder()
                .seed(123)
                .updater(new Adam())
                .weightInit(WeightInit.XAVIER)
                .list()
                .layer(new LSTM.Builder()
                        .nIn(numInputs)
                        .nOut(numHiddenUnits)
                        .activation(Activation.TANH)
                        .build())
                .layer(new RnnOutputLayer.Builder()
                        .nIn(numHiddenUnits)
                        .nOut(numOutputs)
                        .activation(Activation.IDENTITY)
                        .lossFunction(LossFunctions.LossFunction.MSE)
                        .build())
                .build();

        MultiLayerNetwork network = new MultiLayerNetwork(configuration);
        network.init();

        // Train, then evaluate regression metrics on the training data each epoch.
        for (int i = 0; i < numEpochs; i++) {
            network.fit(iterator);
            // fit() leaves the iterator exhausted; reset explicitly before reuse
            // rather than relying on version-dependent auto-reset behavior.
            iterator.reset();
            RegressionEvaluation evaluation = network.evaluateRegression(iterator);
            System.out.println("Epoch " + i + " evaluation: " + evaluation.stats());
            iterator.reset();
        }

        // Predict on unseen inputs.
        // FIX: the original built this tensor with shape [1, testInputArray.length, 1]
        // = [1, 2, 1], i.e. 2 features x 1 time step, which contradicts nIn = 1.
        // The correct layout is [1, numInputs, timeSteps] = [1, 1, 2]: one feature
        // observed over two consecutive time steps.
        double[] testInputArray = {0.5, 0.6};
        INDArray testInput = Nd4j.create(testInputArray, new int[]{1, numInputs, testInputArray.length});

        // rnnTimeStep() is stateful; clear any recurrent state left over from
        // training before running inference.
        network.rnnClearPreviousState();
        INDArray predictedOutput = network.rnnTimeStep(testInput).getRow(0);

        System.out.println("Predicted output: " + predictedOutput);
    }
}
