package com.alatus.djl.dataSet;

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.nn.SequentialBlock;
import ai.djl.nn.core.Linear;
import ai.djl.training.dataset.Dataset;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;

// Data loading and preprocessing module (数据加载和预处理模块)

@Service
@Slf4j
public class HardnessDataset implements Dataset {

    // NOTE(review): @Service with a bare String constructor arg cannot be autowired by
    // Spring without an @Value("${...}") annotation or an explicit @Bean definition — verify wiring.

    /** Feature vector length: 6 (force+torque) + 3 (position) + 4 (quaternion) + 239 (Paxini). */
    private static final int FEATURE_SIZE = 252;

    /** Number of samples per mini-batch produced by {@link #getData}. */
    private static final int BATCH_SIZE = 32;

    /** All samples held in memory; each entry is a fixed-length vector of {@code FEATURE_SIZE} floats. */
    private final List<float[]> dataSamples;

    /**
     * Loads every {@code .csv} file under {@code dataDir} into memory.
     *
     * @param dataDir directory containing one CSV file per sample (first line is the feature vector)
     * @throws IOException if the directory cannot be listed
     */
    public HardnessDataset(String dataDir) throws IOException {
        this.dataSamples = loadDataFromDirectory(Paths.get(dataDir));
    }

    /**
     * Reads the first line of each {@code .csv} file in {@code dataDir} as a comma-separated
     * float vector. Vectors longer than {@code FEATURE_SIZE} are truncated; shorter ones are
     * zero-padded (a {@code new float[]} defaults to 0.0f). Unreadable or malformed files are
     * logged and skipped (best-effort load, not fatal).
     *
     * @param dataDir directory to scan (non-recursive)
     * @return the parsed samples, one per successfully read CSV file
     * @throws IOException if the directory listing itself fails
     */
    private List<float[]> loadDataFromDirectory(Path dataDir) throws IOException {
        List<float[]> samples = new ArrayList<>();

        // Files.list keeps a directory handle open until the stream is closed,
        // so it must be wrapped in try-with-resources (the original leaked it).
        try (Stream<Path> files = Files.list(dataDir)) {
            files.filter(path -> path.toString().endsWith(".csv"))
                    .forEach(csvFile -> {
                        try {
                            List<String> lines = Files.readAllLines(csvFile);
                            if (!lines.isEmpty()) {
                                String[] values = lines.get(0).split(",");
                                float[] sample = new float[FEATURE_SIZE];
                                // Copy at most FEATURE_SIZE values; any missing tail stays 0.0f.
                                for (int i = 0; i < Math.min(FEATURE_SIZE, values.length); i++) {
                                    sample[i] = Float.parseFloat(values[i].trim());
                                }
                                samples.add(sample);
                            }
                        } catch (Exception e) {
                            // Skip the bad file but keep loading the rest.
                            log.error("Error reading file: {}", csvFile, e);
                        }
                    });
        }

        log.info("Loaded {} samples for hardness assessment", samples.size());
        return samples;
    }

    /**
     * Materializes all mini-batches eagerly. This is an unsupervised (auto-encoder) setup:
     * each sample doubles as its own label.
     *
     * <p>Each batch's NDArrays are attached to a dedicated sub-manager owned by the
     * {@code Batch}, so they remain valid until the batch itself is closed. (The original
     * code created each sample inside a per-sample try-with-resources sub-manager, which
     * freed the arrays before {@code NDArrays.stack} could read them.)
     *
     * @param manager    parent manager; each batch gets its own child manager
     * @param batchifier applied to both data and labels
     * @return all batches, in dataset order; the last batch may hold fewer than {@code BATCH_SIZE} samples
     */
    @Override
    public Iterable<ai.djl.training.dataset.Batch> getData(NDManager manager, Batchifier batchifier) {
        List<ai.djl.training.dataset.Batch> batches = new ArrayList<>();

        for (int i = 0; i < dataSamples.size(); i += BATCH_SIZE) {
            int end = Math.min(i + BATCH_SIZE, dataSamples.size());

            // One sub-manager per batch; the Batch takes ownership and frees the
            // arrays when it is closed.
            NDManager batchManager = manager.newSubManager();
            List<NDArray> data = new ArrayList<>(end - i);
            List<NDArray> labels = new ArrayList<>(end - i);

            for (int j = i; j < end; j++) {
                NDArray sample = batchManager.create(dataSamples.get(j));
                data.add(sample);
                // Auto-encoder: the sample is its own reconstruction target.
                labels.add(sample.duplicate());
            }

            NDArray dataBatch = NDArrays.stack(new NDList(data.toArray(new NDArray[0])));
            NDArray labelBatch = NDArrays.stack(new NDList(labels.toArray(new NDArray[0])));

            batches.add(new ai.djl.training.dataset.Batch(
                    batchManager,
                    new NDList(dataBatch),
                    new NDList(labelBatch),
                    // Actual sample count — the original passed the constant 32 even for a
                    // shorter final batch. TODO(review): confirm against the DJL Batch ctor in use.
                    end - i,
                    batchifier,
                    batchifier,
                    end - i,
                    Dataset.Usage.TRAIN,
                    i / BATCH_SIZE
            ));
        }

        return batches;
    }

    /** @return shape of one data sample: a flat vector of {@code FEATURE_SIZE} floats */
    @Override
    public Shape[] getDataShapes() {
        return new Shape[]{new Shape(FEATURE_SIZE)};
    }

    /** @return shape of one label — identical to the data shape (auto-encoder target) */
    @Override
    public Shape[] getLabelShapes() {
        return new Shape[]{new Shape(FEATURE_SIZE)};
    }

    /** @return total number of samples loaded at construction time */
    @Override
    public long size() {
        return dataSamples.size();
    }

    /**
     * Translator for inference: converts a raw float[] feature vector to an NDList
     * and the model's NDList output back to a float[].
     */
    public static class HardnessTranslator implements Translator<float[], float[]> {

        /** Wraps the input vector in a single-element NDList on the context's manager. */
        @Override
        public NDList processInput(TranslatorContext ctx, float[] input) {
            NDArray array = ctx.getNDManager().create(input);
            return new NDList(array);
        }

        /** Flattens the first output NDArray back to a Java float[]. */
        @Override
        public float[] processOutput(TranslatorContext ctx, NDList list) {
            return list.get(0).toFloatArray();
        }

        /** Inputs are stacked along a new batch axis. */
        @Override
        public Batchifier getBatchifier() {
            return Batchifier.STACK;
        }
    }
}