package org.nimi317.web_gis.runnable;

import jakarta.validation.constraints.NotNull;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.datavec.api.records.reader.RecordReader;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.deeplearning4j.datasets.iterator.utilty.ListDataSetIterator;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.LSTM;
import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.evaluation.regression.RegressionEvaluation;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.SplitTestAndTrain;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerMinMaxScaler;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.INDArrayIndex;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nimi317.web_gis.Enum.LstmUpdaterEnum;
import org.nimi317.web_gis.form.post.LstmPost;
import org.nimi317.web_gis.form.post.ModelPost;
import org.nimi317.web_gis.entity.TestInfo;
import org.nimi317.web_gis.service.ModelService;
import org.nimi317.web_gis.service.TestinfoService;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * @author thunderobot
 */
@Slf4j
@Component
@Slf4j
@Component
public class ModelRunnable {

    private final ModelService modelService;

    /**
     * Root directory under which per-model artifacts (model zip, normalizer
     * statistics) are written.
     * NOTE(review): machine-specific absolute path — should be externalized
     * to application configuration.
     */
    public static final String baseLocation = "C:\\Users\\thunderobot\\Desktop\\毕业设计\\web_gis\\src\\main\\resources\\";

    /** Sub-directory (also used as the public URL prefix) for self-trained models. */
    private final String base = "selfModel/";

    private final TestinfoService testinfoService;

    public ModelRunnable(@Lazy ModelService modelService, TestinfoService testinfoService) {
        this.modelService = modelService;
        this.testinfoService = testinfoService;
    }

    /**
     * Trains an LSTM regression model end-to-end: builds the network from the
     * user-supplied hyper-parameters, loads and reshapes the CSV data into
     * time-series tensors, normalizes/standardizes, fits, evaluates on the
     * held-out split, persists the metrics and the trained model.
     *
     * @param id   model id used for status callbacks and artifact paths
     * @param path path of the CSV file holding the raw time-series data
     * @param post user-supplied hyper-parameters (layers, epochs, columns, ...)
     */
    public void run(Integer id, String path, ModelPost post) {
        modelService.handleRunning(id);
        Integer epoch = post.getEpoch();
        Integer batchSize = post.getBatchSize();

        // Step 1: build and initialize the network.
        MultiLayerConfiguration configuration = buildNetwork(post);
        MultiLayerNetwork network = new MultiLayerNetwork(configuration);
        network.setIterationCount(1);
        network.init();

        // Step 2: load the data. loadData reports the failure itself
        // (handleError) and returns null, so we just bail out here.
        DataSet dataSet = loadData(id, path, post);
        if (dataSet == null) {
            return;
        }
        // NOTE(review): DataSet.splitTestAndTrain takes the fraction assigned
        // to the TRAIN split — confirm that getTestRate() actually holds the
        // train proportion, not the test proportion its name suggests.
        SplitTestAndTrain splitTestAndTrain = dataSet.splitTestAndTrain(post.getTestRate());
        DataSet train = splitTestAndTrain.getTrain();
        DataSet test = splitTestAndTrain.getTest();

        // Step 3: normalize (min-max to [0,1]) or standardize (zero mean, unit
        // variance). The statistics are fitted on the train split only and
        // persisted so that inference can apply the identical transform.
        String s = buildPath(id);
        File dir = new File(s);
        if (!dir.exists() && !dir.mkdirs()) {
            // Fail loudly early instead of letting the later save calls throw.
            log.warn("could not create model artifact directory: {}", s);
        }
        if (post.getStandard()) {
            NormalizerStandardize standardize = new NormalizerStandardize();
            standardize.fitLabel(true);
            standardize.fit(train);
            standardize.transform(train);
            standardize.transform(test);
            saveStandard(standardize, s);
        } else {
            NormalizerMinMaxScaler scaler = new NormalizerMinMaxScaler(0, 1);
            scaler.fitLabel(true);
            scaler.fit(train);
            scaler.transform(train);
            scaler.transform(test);
            saveScaler(scaler, s);
        }

        // Step 4: train.
        ListDataSetIterator<DataSet> trainSetIterator = new ListDataSetIterator<>(train.asList(), batchSize);
        network.fit(trainSetIterator, epoch);

        // Step 5: evaluate on the held-out split and persist the per-column
        // regression metrics.
        ListDataSetIterator<DataSet> iterator = new ListDataSetIterator<>(test.asList(), 1);
        INDArray output = network.output(iterator);
        RegressionEvaluation regressionEvaluation = new RegressionEvaluation();
        regressionEvaluation.evalTimeSeries(test.getLabels(), output, test.getLabelsMaskArray());
        testinfoService.saveBatch(buildTestInfos(id, regressionEvaluation));
        modelService.handleSuccess(id, buildUrl(id));

        // Step 6: persist the trained model.
        saveModel(network, s);
    }

    /**
     * Collects the per-output-column regression metrics (MSE, MAE, RMSE, RSE,
     * Pearson correlation, R^2) into {@link TestInfo} entities for batch save.
     */
    private List<TestInfo> buildTestInfos(Integer id, RegressionEvaluation eval) {
        int columns = eval.numColumns();
        List<TestInfo> list = new ArrayList<>(columns);
        for (int i = 0; i < columns; i++) {
            TestInfo e = new TestInfo();
            e.setMse(eval.meanSquaredError(i));
            e.setMae(eval.meanAbsoluteError(i));
            e.setRmse(eval.rootMeanSquaredError(i));
            e.setRse(eval.relativeSquaredError(i));
            e.setPc(eval.pearsonCorrelation(i));
            e.setR2(eval.rSquared(i));
            e.setModelId(id);
            list.add(e);
        }
        return list;
    }

    /**
     * Builds the full network configuration: the user-defined LSTM stack
     * followed by an identity-activation RNN output layer with MSE loss
     * (standard setup for multi-variate regression).
     */
    private MultiLayerConfiguration buildNetwork(ModelPost post) {
        List<LSTM> layer = buildLayer(post);

        NeuralNetConfiguration.ListBuilder listBuilder = buildConfiguration().list();
        for (LSTM lstm : layer) {
            listBuilder.layer(lstm);
        }
        listBuilder.layer(new RnnOutputLayer
                .Builder()
                // One output unit per predicted label column.
                .nOut(post.getLabelStop() - post.getLabelStart())
                .activation(Activation.IDENTITY)
                .lossFunction(LossFunctions.LossFunction.MSE)
                .build());
        return listBuilder.build();
    }

    /**
     * Common base configuration: fixed seed for reproducibility, SGD
     * optimization, Xavier initialization and tanh default activation.
     */
    private NeuralNetConfiguration.Builder buildConfiguration() {
        return new NeuralNetConfiguration
                .Builder()
                .seed(12345)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.XAVIER).activation(Activation.TANH);
    }

    /**
     * Builds the LSTM stack from the request. Only the first layer sets nIn
     * (the feature-column count); later layers leave it for DL4J to infer.
     *
     * @param post request carrying one {@link LstmPost} per hidden layer
     */
    private List<LSTM> buildLayer(@NotNull ModelPost post) {
        List<LSTM> layers = new ArrayList<>();
        List<LstmPost> lstm = post.getLstm();
        for (int i = 0; i < lstm.size(); i++) {
            LstmPost lstmPost = lstm.get(i);
            LstmUpdaterEnum updater = lstmPost.getUpdater();
            if (i == 0) {
                layers.add(new LSTM.Builder()
                        .updater(updater.resolveUpdater(lstmPost.getLearningRate()))
                        .l2(lstmPost.getL2())
                        .nIn(post.getFeatureStop() - post.getFeatureStart())
                        .nOut(lstmPost.getHiddenCell())
                        .build());
            } else {
                layers.add(new LSTM.Builder()
                        .updater(updater.resolveUpdater(lstmPost.getLearningRate()))
                        .l2(lstmPost.getL2())
                        .nOut(lstmPost.getHiddenCell())
                        .build());
            }
        }
        return layers;
    }

    /**
     * Reads the CSV and reshapes the flat rows into DL4J time-series tensors
     * of shape [samples, columns, timeSteps]. Each sample is a window of
     * featureStep consecutive rows (features) followed by labelStep rows
     * (labels); windows either slide by one row (timeWindow == true) or tile
     * the file without overlap. Mask arrays mark which time steps of the
     * padded maxStep axis are real.
     *
     * @return the assembled DataSet, or null on any failure (the error is
     *         logged and reported to modelService here)
     */
    private DataSet loadData(Integer id, String path, ModelPost post) {

        int featureStep = post.getFeatureStep();
        int labelStep = post.getLabelStep();
        Integer labelColumnsStart = post.getLabelStart();
        Integer labelColumnsEnd = post.getLabelStop();
        Integer featureColumnsStart = post.getFeatureStart();
        Integer featureColumnsEnd = post.getFeatureStop();
        Boolean window = post.getTimeWindow();
        try (RecordReader rr = new CSVRecordReader(post.getSkip())) {
            rr.initialize(new FileSplit(new File(path)));
            int size = size(rr);

            // Pull the whole file into one DataSet so we can slice it freely.
            DataSetIterator iterator = new RecordReaderDataSetIterator(rr, size);
            DataSet next = iterator.next();
            INDArray features = next.getFeatures();
            int numLength;
            int maxStep = Math.max(featureStep, labelStep);
            int rows = features.rows();
            int j = featureStep + labelStep;
            if (window) {
                // Sliding window: one sample per starting row.
                numLength = rows - j + 1;
            } else {
                // Non-overlapping tiling: one sample per j rows.
                numLength = rows / j;
            }

            // Feature tensor: [samples, featureColumns, maxStep].
            INDArray feature = Nd4j.create(numLength, featureColumnsEnd - featureColumnsStart, maxStep);
            // Label tensor: [samples, labelColumns, maxStep].
            INDArray label = Nd4j.create(numLength, labelColumnsEnd - labelColumnsStart, maxStep);
            // Masks: 1 for real time steps, 0 for padding on the maxStep axis.
            INDArray zeros = Nd4j.zeros(numLength, maxStep);
            INDArray zeros1 = Nd4j.zeros(numLength, maxStep);
            zeros.put(new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.interval(0, labelStep)}, Nd4j.ones(numLength, labelStep));
            zeros1.put(new INDArrayIndex[]{NDArrayIndex.all(), NDArrayIndex.interval(0, featureStep)}, Nd4j.ones(numLength, featureStep));

            // Column ranges selecting the feature and label columns of the CSV.
            INDArrayIndex featureColumnRange = NDArrayIndex.interval(featureColumnsStart, featureColumnsEnd);
            INDArrayIndex labelColumnRange = NDArrayIndex.interval(labelColumnsStart, labelColumnsEnd);

            for (int i = 0; i < numLength; i++) {
                // Row offset of this sample in the flat CSV data.
                int start = window ? i : i * j;
                INDArrayIndex labelInterval = NDArrayIndex.interval(start + featureStep, start + featureStep + labelStep);
                INDArrayIndex dataRange = NDArrayIndex.interval(start, start + featureStep);

                // Slice this sample's rows out of the flat data.
                INDArray labelRange = features.get(labelInterval, labelColumnRange).dup();
                INDArray featureRange = features.get(dataRange, featureColumnRange).dup();

                // BUGFIX: the output tensors have numLength rows indexed by the
                // sample counter i, not by the CSV row offset `start`. Using
                // `start` went out of bounds whenever window == false
                // (start = i * j >= numLength for most i).
                INDArray featureRow = feature.get(NDArrayIndex.point(i));
                INDArray labelRow = label.get(NDArrayIndex.point(i));

                // Copy in as [columns, timeSteps]; the slices come out as
                // [timeSteps, columns], hence the transpose for matrices.
                featureRow.put(new INDArrayIndex[]{NDArrayIndex.interval(0, featureColumnsEnd - featureColumnsStart), NDArrayIndex.interval(0, featureStep)}, featureRange.isMatrix() ? featureRange.transpose() : featureRange);
                labelRow.put(new INDArrayIndex[]{NDArrayIndex.interval(0, labelColumnsEnd - labelColumnsStart), NDArrayIndex.interval(0, labelStep)}, labelRange.isMatrix() ? labelRange.transpose() : labelRange);

                // Release the temporary copies.
                labelRange.close();
                featureRange.close();
            }
            // Assemble with feature mask (zeros1) and label mask (zeros).
            return new DataSet(feature, label, zeros1, zeros);
        } catch (Exception e) {
            log.error("", e);
            modelService.handleError(id);
            return null;
        }
    }

    /** Counts the records in the reader, then resets it for re-reading. */
    private int size(RecordReader reader) {
        int size = 0;
        while (reader.hasNext()) {
            size++;
            reader.next();
        }
        reader.reset();
        return size;
    }

    /** Relative URL under which the trained model is exposed. */
    private String buildUrl(Integer id) {
        return base + id;
    }

    /** Absolute filesystem directory for this model's artifacts. */
    private String buildPath(Integer id) {
        return baseLocation + base + id;
    }

    /** Persists the trained network as {@code model.zip} under directory s. */
    @SneakyThrows
    private void saveModel(MultiLayerNetwork network, String s) {
        network.save(new File(s + "/" + "model.zip"));
    }

    /** Persists the min-max scaler statistics so inference can reuse them. */
    @SneakyThrows
    private void saveScaler(NormalizerMinMaxScaler scaler, String s) {
        Nd4j.saveBinary(scaler.getMin(), new File(s + "/min.bin"));
        Nd4j.saveBinary(scaler.getMax(), new File(s + "/max.bin"));
        Nd4j.saveBinary(scaler.getLabelMin(), new File(s + "/label_min.bin"));
        Nd4j.saveBinary(scaler.getLabelMax(), new File(s + "/label_max.bin"));
    }

    /** Persists the standardizer statistics so inference can reuse them. */
    @SneakyThrows
    private void saveStandard(NormalizerStandardize standard, String s) {
        Nd4j.saveBinary(standard.getMean(), new File(s + "/mean.bin"));
        Nd4j.saveBinary(standard.getStd(), new File(s + "/std.bin"));
        Nd4j.saveBinary(standard.getLabelMean(), new File(s + "/label_mean.bin"));
        Nd4j.saveBinary(standard.getLabelStd(), new File(s + "/label_std.bin"));
    }
}
