package com.iris.controller;


import com.yzc.model.entity.lris.IrisBean;
import com.yzc.result.Result;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.DataSet;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.ui.standalone.ClassPathResource;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.preprocessor.DataNormalization;
import org.nd4j.linalg.dataset.api.preprocessor.NormalizerStandardize;
import org.nd4j.linalg.factory.Nd4j;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

@RestController
@RequestMapping("/iris")
public class IrisController {

    /** Trained classifier, loaded once from the classpath at startup. */
    private final MultiLayerNetwork model;

    /**
     * Standardizer fitted ONCE on the raw (un-normalized) training data and
     * reused for every request. The previous code re-fitted a normalizer per
     * request on already-standardized features plus the raw input row, which
     * skewed the statistics and mis-scaled the input fed to the model.
     */
    private final DataNormalization normalizer;

    /**
     * Loads the serialized model and fits the feature standardizer.
     * Doing both here avoids re-reading the training CSV on every request.
     */
    public IrisController() {
        try {
            File modelFile = new ClassPathResource("iris_model.zip").getFile();
            model = ModelSerializer.restoreMultiLayerNetwork(modelFile);

            normalizer = new NormalizerStandardize();
            // Fit on the RAW dataset so transform() reproduces exactly the
            // scaling the model saw during training.
            normalizer.fit(loadRawData("iris.csv"));
        } catch (Exception e) {
            throw new RuntimeException("Failed to initialize iris classifier", e);
        }
    }

    /**
     * Classifies an iris flower from its four measurements.
     *
     * @param bean sepal/petal length and width from the JSON request body
     * @return the predicted species name ("setosa", "versicolor" or "virginica")
     *         wrapped in a success Result
     */
    @PostMapping("/fenlei")
    public Result fenlei(@RequestBody IrisBean bean) throws Exception {
        // Shape [1, 4] — one sample, four features, matching the training layout.
        INDArray input = Nd4j.create(new double[]{
                bean.getSepallength(),
                bean.getSepalwidth(),
                bean.getPetallength(),
                bean.getPetalwidth()
        }).reshape(1, 4);

        // Apply the same standardization the training data received.
        normalizer.transform(input);

        // Forward pass on the single row; argMax over the class dimension.
        // Index 0 is the only row, so no magic row count is needed.
        INDArray output = model.output(input);
        int prediction = Nd4j.argMax(output, 1).getInt(0);

        switch (prediction) {
            case 0:
                return Result.success("setosa");
            case 1:
                return Result.success("versicolor");
            default:
                return Result.success("virginica");
        }
    }

    /**
     * Loads the iris CSV, shuffles it, and standardizes its features.
     * Kept with its original public signature and behavior for any other
     * callers; note the returned DataSet is normalized with statistics
     * fitted on this same data.
     *
     * @param filename classpath resource name of the CSV file
     * @return the shuffled, standardized dataset
     * @throws Exception if the resource cannot be read or parsed
     */
    public DataSet getTrainAndTestData(String filename) throws Exception {
        DataSet allData = loadRawData(filename);
        // Shuffle row order (irrelevant for normalization, kept for parity).
        allData.shuffle();
        DataNormalization norm = new NormalizerStandardize();
        norm.fit(allData);
        norm.transform(allData);
        return allData;
    }

    /**
     * Reads the whole iris CSV into a single un-normalized DataSet.
     *
     * @param filename classpath resource name of the CSV file
     * @return raw features and one-hot labels for the full file
     * @throws Exception if the resource cannot be read or parsed
     */
    private DataSet loadRawData(String filename) throws Exception {
        int numLinesToSkip = 1; // skip the CSV header row
        char delimiter = ',';
        CSVRecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
        recordReader.initialize(new FileSplit(new ClassPathResource(filename).getFile()));

        int labelIndex = 4;  // fifth column holds the class label
        int numClasses = 3;  // setosa / versicolor / virginica
        int batchSize = 150; // whole dataset in one batch
        DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
        return iterator.next();
    }
}
