package com.alatus.djl.service.impl;

import ai.djl.Model;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.DataType;
import ai.djl.ndarray.types.Shape;
import ai.djl.nn.Block;
import ai.djl.training.DefaultTrainingConfig;
import ai.djl.training.EasyTrain;
import ai.djl.training.ParameterStore;
import ai.djl.training.Trainer;
import ai.djl.training.dataset.Dataset;
import ai.djl.training.listener.TrainingListener;
import ai.djl.training.loss.Loss;
import ai.djl.training.util.ProgressBar;
import com.alatus.djl.dataSet.HardnessDataset;
import com.alatus.djl.model.AutoEncoderModel;
import com.alatus.djl.service.HardnessAssessmentService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.math3.ml.clustering.CentroidCluster;
import org.apache.commons.math3.ml.clustering.DoublePoint;
import org.apache.commons.math3.ml.clustering.KMeansPlusPlusClusterer;
import org.apache.commons.math3.ml.distance.EuclideanDistance;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.StringJoiner;

@Service
@Slf4j
public class HardnessAssessmentServiceImpl implements HardnessAssessmentService {

    /** Number of K-means clusters; also the number of discrete hardness levels (1..9). */
    private static final int NUM_CLUSTERS = 9;
    /** Expected dimensionality of the encoder's latent feature vector. */
    private static final int FEATURE_DIM = 32;
    /** Length of one flattened input sample fed to the autoencoder. */
    private static final int INPUT_DIM = 252;
    /** Row count of the output hardness score map. */
    private static final int GRID_ROWS = 9;
    /** Column count of the output hardness score map. */
    private static final int GRID_COLS = 11;
    /** Autoencoder training epochs. */
    private static final int TRAINING_EPOCHS = 50;

    /**
     * Full hardness-assessment pipeline: train an autoencoder on the dataset,
     * extract latent features, cluster them with K-means, map clusters to ordered
     * hardness levels, lay the levels out on a {@value GRID_ROWS}x{@value GRID_COLS}
     * grid, and persist the result as CSV.
     *
     * @param datasetPath directory/file the {@link HardnessDataset} loads samples from
     * @param modelPath   directory where the trained model and CSV map are written
     * @throws IOException if dataset loading or model saving fails
     */
    @Override
    public void trainAndAssessHardness(String datasetPath, String modelPath) throws IOException {
        // 1. Load the data.
        HardnessDataset dataset = new HardnessDataset(datasetPath);

        // 2. Train the autoencoder.
        Model autoencoder = trainAutoEncoder(dataset, modelPath);

        // 3. Extract latent features with the encoder half.
        List<double[]> features = extractFeatures(autoencoder, dataset);

        // 4. K-means clustering of the feature vectors.
        int[] clusterAssignments = performClustering(features);

        // 5. Map clusters to ordered hardness levels (1 = softest .. 9 = hardest).
        int[] hardnessLevels = mapClustersToHardnessLevels(features, clusterAssignments);

        // 6. Arrange levels on the fixed 9x11 score grid.
        double[][] hardnessMap = generateHardnessMap(hardnessLevels);

        log.info("Hardness assessment completed. Generated {}x{} hardness map.",
                hardnessMap.length, hardnessMap[0].length);

        // Persist the result (best-effort; failures are logged, not rethrown).
        saveHardnessResults(hardnessMap, modelPath);
    }

    /**
     * Trains the autoencoder with an L2 reconstruction loss and saves it.
     *
     * @param dataset   training samples (used as both input and target)
     * @param modelPath directory to save the trained model into
     * @return the trained model
     * @throws IOException if the model cannot be saved
     */
    private Model trainAutoEncoder(Dataset dataset, String modelPath) throws IOException {
        Model model = AutoEncoderModel.getAutoEncoderModel();

        // DefaultTrainingConfig reports the configured loss as an evaluator
        // automatically, so no explicit addEvaluator(...) is needed. (The former
        // `new Loss()` call could not compile: Loss is an abstract class.)
        DefaultTrainingConfig config = new DefaultTrainingConfig(Loss.l2Loss())
                .addTrainingListeners(TrainingListener.Defaults.logging());

        try (Trainer trainer = model.newTrainer(config)) {
            // Initialize with a single flattened sample shape.
            trainer.initialize(new Shape(1, INPUT_DIM));

            EasyTrain.fit(trainer, TRAINING_EPOCHS, dataset, null); // no validation set

            model.save(Paths.get(modelPath), "hardness-autoencoder");
        }

        return model;
    }

    /**
     * Runs every raw sample through the encoder block and collects the latent
     * vectors.
     *
     * <p>TODO(review): the encoder currently runs with freshly initialized
     * parameters. For meaningful features, the trained autoencoder's encoder
     * weights must be copied into this block — confirm and implement the weight
     * transfer.
     *
     * @param autoencoder the trained autoencoder (weights not yet transferred, see TODO)
     * @param dataset     source of raw float samples
     * @return one latent feature vector per sample, in dataset order
     */
    private List<double[]> extractFeatures(Model autoencoder, HardnessDataset dataset) {
        List<double[]> features = new ArrayList<>();

        try (NDManager manager = NDManager.newBaseManager()) {
            Block encoder = AutoEncoderModel.getEncoderBlock();
            // The block must be initialized before it can be forwarded.
            encoder.initialize(manager, DataType.FLOAT32, new Shape(1, INPUT_DIM));

            // Block.forward requires an explicit ParameterStore; training=false
            // because this is pure inference.
            ParameterStore parameterStore = new ParameterStore(manager, false);

            for (float[] sample : dataset.getRawSamples()) {
                NDArray input = manager.create(sample);
                NDArray encoded = encoder.forward(parameterStore, new NDList(input), false).get(0);

                features.add(encoded.toDoubleArray());
            }
        } // manager close releases all NDArrays created above

        return features;
    }

    /**
     * Clusters the feature vectors into {@value NUM_CLUSTERS} groups with
     * K-means++.
     *
     * @param features latent feature vectors
     * @return cluster index (0-based) per feature, in input order
     * @throws IllegalStateException if there are fewer samples than clusters
     */
    private int[] performClustering(List<double[]> features) {
        if (features.size() < NUM_CLUSTERS) {
            throw new IllegalStateException(
                    "Need at least " + NUM_CLUSTERS + " samples to cluster, got " + features.size());
        }

        // Identity-based index map: DoublePoint.equals compares array contents,
        // so List.indexOf would both be O(n^2) overall and mis-assign duplicate
        // feature vectors to the first occurrence's index.
        List<DoublePoint> points = new ArrayList<>(features.size());
        Map<DoublePoint, Integer> indexByPoint = new IdentityHashMap<>();
        for (double[] feature : features) {
            DoublePoint point = new DoublePoint(feature);
            indexByPoint.put(point, points.size());
            points.add(point);
        }

        KMeansPlusPlusClusterer<DoublePoint> clusterer =
                new KMeansPlusPlusClusterer<>(NUM_CLUSTERS, 100, new EuclideanDistance());

        List<CentroidCluster<DoublePoint>> clusters = clusterer.cluster(points);

        // Assign cluster labels; the clusterer returns the same point instances
        // it was given, so the identity lookup is exact.
        int[] assignments = new int[features.size()];
        for (int i = 0; i < clusters.size(); i++) {
            for (DoublePoint point : clusters.get(i).getPoints()) {
                assignments[indexByPoint.get(point)] = i;
            }
        }

        return assignments;
    }

    /**
     * Orders clusters by their mean feature-vector norm (assumed to correlate
     * with hardness — TODO confirm against domain knowledge) and maps each
     * cluster to a hardness level 1..{@value NUM_CLUSTERS} (ascending norm).
     *
     * @param features           latent feature vectors
     * @param clusterAssignments 0-based cluster index per feature
     * @return hardness level (1..9) per feature, in input order
     */
    private int[] mapClustersToHardnessLevels(List<double[]> features, int[] clusterAssignments) {
        // Mean L2 norm per cluster.
        double[] clusterNorms = new double[NUM_CLUSTERS];
        int[] clusterSizes = new int[NUM_CLUSTERS];

        for (int i = 0; i < features.size(); i++) {
            int cluster = clusterAssignments[i];
            clusterNorms[cluster] += calculateNorm(features.get(i));
            clusterSizes[cluster]++;
        }

        for (int i = 0; i < NUM_CLUSTERS; i++) {
            if (clusterSizes[i] > 0) { // empty clusters keep norm 0 and sort first
                clusterNorms[i] /= clusterSizes[i];
            }
        }

        // Sort cluster indices by ascending mean norm.
        Integer[] clusterIndices = new Integer[NUM_CLUSTERS];
        for (int i = 0; i < NUM_CLUSTERS; i++) {
            clusterIndices[i] = i;
        }
        Arrays.sort(clusterIndices, Comparator.comparingDouble(i -> clusterNorms[i]));

        // cluster index -> hardness level (1 = smallest norm).
        int[] hardnessMapping = new int[NUM_CLUSTERS];
        for (int hardness = 1; hardness <= NUM_CLUSTERS; hardness++) {
            hardnessMapping[clusterIndices[hardness - 1]] = hardness;
        }

        // Translate per-sample cluster assignments into hardness levels.
        int[] hardnessLevels = new int[clusterAssignments.length];
        for (int i = 0; i < clusterAssignments.length; i++) {
            hardnessLevels[i] = hardnessMapping[clusterAssignments[i]];
        }

        return hardnessLevels;
    }

    /**
     * Lays the per-sample hardness levels onto a {@value GRID_ROWS}x{@value GRID_COLS}
     * grid in row-major order (assumes the dataset samples correspond to that
     * layout — TODO confirm with the data source). Cells beyond the number of
     * samples are filled with 0 ("no data").
     *
     * @param hardnessLevels hardness level per sample
     * @return the grid of hardness scores
     */
    private double[][] generateHardnessMap(int[] hardnessLevels) {
        double[][] hardnessMap = new double[GRID_ROWS][GRID_COLS];
        int index = 0;

        for (int i = 0; i < GRID_ROWS; i++) {
            for (int j = 0; j < GRID_COLS; j++) {
                if (index < hardnessLevels.length) {
                    hardnessMap[i][j] = hardnessLevels[index++];
                } else {
                    hardnessMap[i][j] = 0; // no data point for this cell
                }
            }
        }

        return hardnessMap;
    }

    /**
     * Euclidean (L2) norm of a vector; returns 0 for an empty vector.
     */
    private double calculateNorm(double[] vector) {
        double sum = 0;
        for (double v : vector) {
            sum += v * v;
        }
        return Math.sqrt(sum);
    }

    /**
     * Writes the hardness map as CSV to {@code <outputPath>/hardness_map.csv}.
     * Best-effort: I/O failures are logged rather than propagated, since the
     * assessment itself has already completed.
     *
     * @param hardnessMap the grid of hardness scores
     * @param outputPath  target directory
     */
    private void saveHardnessResults(double[][] hardnessMap, String outputPath) {
        try {
            Path outputFile = Paths.get(outputPath, "hardness_map.csv");
            List<String> lines = new ArrayList<>(hardnessMap.length);

            for (double[] row : hardnessMap) {
                StringJoiner line = new StringJoiner(",");
                for (double value : row) {
                    // Locale.ROOT guarantees a '.' decimal separator regardless of
                    // the JVM's default locale, keeping the CSV well-formed.
                    line.add(String.format(Locale.ROOT, "%.2f", value));
                }
                lines.add(line.toString());
            }

            Files.write(outputFile, lines);
            log.info("Hardness map saved to: {}", outputFile);
        } catch (IOException e) {
            log.error("Failed to save hardness map", e);
        }
    }
}