package com.prawn.predict.controller;

import com.netflix.discovery.converters.Auto;
import com.prawn.predict.arima.ARIMA;
import com.prawn.predict.client.DataRecordClient;
import com.prawn.predict.pojo.DataRecord;
import entity.PageResult;
import entity.Result;
import entity.StatusCode;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Nesterovs;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @Author: zhy
 * @Date: 2019/7/25
 * @Description: Forecasting endpoints (ARIMA and LSTM-RNN) over processed data records.
 */
@CrossOrigin
@RestController
@RequestMapping(value = "/forcast")
public class DataRecordPredictController {

    @Autowired
    private DataRecordClient dataRecordClient;

    /**
     * ARIMA rolling forecast.
     *
     * Fetches up to 350 processed data records, then walk-forward predicts the
     * most recent 20% of the series: each one-step prediction is appended to
     * the input window before predicting the next step.
     *
     * @param searchMap query criteria forwarded to the data-record service
     * @param page      page index forwarded to the data-record service
     * @param size      ignored — overridden with a fixed window of 350 records
     * @return Result whose data map contains "orgindata" (raw records) and
     *         "predictdata" (one-step-ahead predictions)
     */
    // FIX: the original method had @RequestBody/@PathVariable parameters but no
    // @RequestMapping, so the endpoint was unreachable. Mapped analogously to
    // the sibling /lstmrnn/{page}/{size} endpoint.
    @RequestMapping(value = "/arima/{page}/{size}", method = RequestMethod.POST)
    public Result forcastByArima(@RequestBody Map searchMap , @PathVariable int page, @PathVariable int size) {
        size = 350; // fixed sample size; the {size} path variable is deliberately overridden

        PageResult<DataRecord> search = dataRecordClient.findSearch(searchMap, page, size);
        List<DataRecord> dataRecordList = search.getRows();

        // Observations in chronological order (the service returns newest-first,
        // so iterate backwards) plus the growing list of predictions.
        ArrayList<Double> arraylist = new ArrayList<Double>();
        ArrayList<Double> predictValueList = new ArrayList<>();
        for (int i = dataRecordList.size() - 1; i >= 0; i--) {
            arraylist.add(Double.parseDouble(dataRecordList.get(i).getProcessedData() + ""));
        }

        // Only forecast when there is at least one sample.
        if (dataRecordList.size() > 0) {
            int num = dataRecordList.size() / 5; // forecast horizon: last 20% of the series
            for (int i = 0; i < num; i++) {
                // Sliding window: starts at size-num observations and grows by one
                // (the previous prediction) each iteration.
                int windowSize = arraylist.size() - num + i;
                double[] dataArray = new double[windowSize];
                for (int j = 0; j < windowSize; j++) {
                    dataArray[j] = arraylist.get(j);
                }

                // Fit and predict one step ahead.
                ARIMA arima = new ARIMA(dataArray);
                int[] model = arima.getARIMAmodel();
                // FIX: compute the prediction exactly once. The original called
                // predictValue()/aftDeal() twice per iteration — double work, and
                // if the model fit is stochastic the value fed back into the
                // window could differ from the one reported to the caller.
                double predicted = arima.aftDeal(arima.predictValue(model[0], model[1]));
                predictValueList.add(predicted);
                arraylist.add(predicted); // feed the prediction back into the window
            }
        }

        Map<String, Object> resultMap = new HashMap<>();
        resultMap.put("orgindata", dataRecordList);   // original records
        resultMap.put("predictdata", predictValueList); // predicted values
        return new Result(true, StatusCode.OK, "请求成功", resultMap);
    }

    /**
     * LSTM-RNN forecast (trains the network on every request).
     *
     * Builds a training set from the oldest records and a test set from more
     * recent ones, trains a 1-in / 1-out LSTM for 200 epochs, produces fitted
     * values over the test set, then feeds each prediction back into the
     * network for 50 genuinely out-of-sample steps.
     *
     * NOTE(review): training each HTTP request is expensive; if this endpoint is
     * hit concurrently, consider caching the trained network.
     *
     * @param searchMap query criteria forwarded to the data-record service
     * @param page      page index forwarded to the data-record service
     * @param size      ignored — overridden with a fixed window of 350 records
     * @return Result whose data map contains "orgindata" (raw records),
     *         "predictdata" (fitted values) and "realPredictData" (50 recursive
     *         out-of-sample predictions)
     */
    @RequestMapping(value = "/lstmrnn/{page}/{size}",method = RequestMethod.POST)
    public Result forcastByLSTMRNN(@RequestBody Map searchMap , @PathVariable int page, @PathVariable int size) {
        size = 350; // fixed sample size; the {size} path variable is deliberately overridden
        PageResult<DataRecord> search = dataRecordClient.findSearch(searchMap, page, size);
        List<DataRecord> dataRecordList = search.getRows();

        // NOTE(review): the array sizes below assume dataRecordList.size() > 100
        // (ideally exactly 350); fewer records would throw
        // NegativeArraySizeException / leave arrays partially zero-filled —
        // confirm the upstream service always returns a full page.
        ArrayList<Double> arraylist=new ArrayList<Double>();
        ArrayList<Double> predictValueList = new ArrayList<>();
        double[] inputTrainDataArr = new double[dataRecordList.size() - 100];  // training features
        double[] outputTrainDataArr = new double[dataRecordList.size() - 100]; // training labels
        double[] inputTestDataArr = new double[99];                            // test features
        double[] outputTestDataArr = new double[99];                           // test labels

        // Build the training set from the oldest records (indices size-1 .. 100,
        // i.e. chronological order since the service returns newest-first).
        for (int i = dataRecordList.size() - 1,j = 0; i >= 100 && j < inputTrainDataArr.length; i--,j++) {
            inputTrainDataArr[j] = dataRecordList.get(i).getProcessedData();
        }
        // NOTE(review): this loop fills the labels with exactly the same values
        // as the features (same indices, same bound) — the network is trained to
        // reproduce its input rather than the next time step. A one-step shift
        // (e.g. get(i - 1)) was probably intended, as in the test-set loop below;
        // left unchanged pending confirmation.
        for (int i = dataRecordList.size() - 1,j = 0; i >= 100 && j < inputTrainDataArr.length; i--,j++) {
            outputTrainDataArr[j] = dataRecordList.get(i).getProcessedData();
        }
        // Wrap as [miniBatch=1][channels=1][timeSeriesLength] for DL4J RNNs.
        double[][][] trainDataINDInputBox = {{inputTrainDataArr}};
        double[][][] trainDataINDOutputBox = {{outputTrainDataArr}};
        INDArray trainDataINDInput = Nd4j.create(trainDataINDInputBox);
        INDArray trainDataINDoutput = Nd4j.create(trainDataINDOutputBox);
        DataSet trainData = new DataSet();
        trainData.setFeatures(trainDataINDInput);
        trainData.setLabels(trainDataINDoutput);

        // Build the test set: features from index size-250 downward, labels
        // shifted one step later (get(i - 1)) — i.e. predict the next value.
        for (int j = 0, i = dataRecordList.size() - 250; i > 0 && j < outputTestDataArr.length; i--,j++) {
            inputTestDataArr[j] = dataRecordList.get(i).getProcessedData();
        }
        for (int j = 0, i = dataRecordList.size() - 251; i > 0 && j < outputTestDataArr.length; i--,j++) {
            outputTestDataArr[j] = dataRecordList.get(i - 1).getProcessedData();
        }
        double[][][] testDataINDInputBox = {{inputTestDataArr}};
        double[][][] testDataINDOutputBox = {{outputTestDataArr}};
        INDArray testDataINDInput = Nd4j.create(testDataINDInputBox);
        INDArray testDataINDoutput = Nd4j.create(testDataINDOutputBox);
        DataSet testData = new DataSet();
        testData.setFeatures(testDataINDInput);
        testData.setLabels(testDataINDoutput);

        // Min-max normalize features AND labels to [0, 1]; statistics are
        // collected from the training data only, then applied to both sets.
        NormalizerMinMaxScaler normalizer = new NormalizerMinMaxScaler(0, 1);
        normalizer.fitLabel(true);
        normalizer.fit(trainData);
        normalizer.transform(trainData);
        normalizer.transform(testData);

        // LSTM(1 -> 5, tanh) followed by an identity RNN output layer (1 unit),
        // trained with SGD + Nesterov momentum on MSE loss.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(140) // fixed seed for reproducible weight init
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.XAVIER)
                .updater(new Nesterovs(0.0015, 0.9))
                .list()
                .layer(0, new LSTM.Builder().activation(Activation.TANH).nIn(1).nOut(5)
                        .build())
                .layer(1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MSE)
                        .activation(Activation.IDENTITY).nIn(5).nOut(1).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // Train for a fixed number of epochs (whole series = one minibatch).
        int nEpochs = 200;
        System.out.println("开始训练...");
        for (int i = 0; i < nEpochs; i++) {
            net.fit(trainData);
        }
        System.out.println("训练结束");

        // Fitted values: prime the RNN state with the training series, then
        // step through the test series (this is in-sample fitting, not a true
        // out-of-sample forecast).
        net.rnnTimeStep(trainData.getFeatures());
        INDArray predicted = net.rnnTimeStep(testData.getFeatures());

        // Undo the [0, 1] label normalization before returning values.
        normalizer.revertLabels(predicted);
        for (int i = 0; i < predicted.length(); i++) {
            predictValueList.add(predicted.getDouble(0,0,i));
        }

        // True recursive forecast: feed each (re-normalized) prediction back in
        // as the next input for 50 steps, starting from the last fitted value.
        double[] realPredictData = new double[50];
        realPredictData[0] = predicted.getDouble(0, 0, predicted.length() - 1);
        for (int i = 1;i < realPredictData.length;i++) {
            INDArray indArray = Nd4j.create(new double[]{realPredictData[i - 1]}, new int[]{1, 1});
            normalizer.transform(indArray);
            INDArray realPredicted = net.rnnTimeStep(indArray);
            normalizer.revertLabels(realPredicted);
            realPredictData[i] = realPredicted.getDouble(0);
        }

        Map<String, Object> resultMap = new HashMap<>();
        resultMap.put("orgindata", dataRecordList);        // original records
        resultMap.put("predictdata", predictValueList);    // fitted values over the test span
        resultMap.put("realPredictData", realPredictData); // 50-step recursive forecast
        return new Result(true, StatusCode.OK, "请求成功", resultMap);
    }

}
