package com.managertrade.util.strategy;

import com.managertrade.dto.vo.KlineWithIndicators;
import ml.dmlc.xgboost4j.java.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class XgbKlinePipeline {
    // Labeling / backtest tuning constants.
    // NOTE(review): the ten constants below (UP_THRESHOLD .. FUNDING_RATE) are
    // not referenced anywhere in this file — presumably reserved for backtest
    // logic elsewhere; confirm before removing.
    private static final float UP_THRESHOLD = 0.005f;   // a +0.5% move counts as BUY
    private static final float DOWN_THRESHOLD = 0.005f; // a -0.5% move counts as SELL
    private static final int TRAIN_WINDOW = 100;        // rolling training window size
    private static final double INITIAL_BALANCE = 10000.0;   // starting capital
    private static final double TRADE_SIZE = 1000.0;          // capital committed per position
    private static final double LEVERAGE = 25;                // leverage multiplier
    private static final double STOP_LOSS = 0.04;             // 4% liquidation stop
    private static final double FEE_RATE = 0.002;             // 0.2% round-trip fee
    private static final double SLIPPAGE = 0.0005;            // 0.05% slippage
    private static final double FUNDING_RATE = 0.0001;        // 0.01% per 8 hours
    // Shared global Booster used by save / load / incremental training.
    private static Booster boosterGlobal = null;
    private static final String MODEL_FILE = "xgboost_kline_model.bin"; // model file name

    // Absolute path of the shared data directory: a "data" folder placed next
    // to (as a sibling of) the working directory. Created on class load.
    public static final String DATA_DIR;

    static {
        // Resolve the current working directory (project root when launched normally).
        String projectDir = System.getProperty("user.dir");
        File projectFile = new File(projectDir);
        File parentDir = projectFile.getParentFile();
        // Build the path of a "data" folder under the parent directory.
        DATA_DIR = new File(parentDir, "data").getAbsolutePath();
        // Ensure the directory exists (mkdirs result intentionally ignored:
        // a later failure to read/write will surface the problem).
        File dataDirFile = new File(DATA_DIR);
        if (!dataDirFile.exists()) {
            dataDirFile.mkdirs();
        }
    }
    /**
     * Persists the global Booster to {@code DATA_DIR/eth_4h_<MODEL_FILE>}.
     *
     * @throws IllegalStateException if no model has been trained or loaded yet
     */
    public static void saveModel() throws Exception {
        if (boosterGlobal == null) {
            throw new IllegalStateException("模型为空，无法保存！");
        }
        final String modelPath = DATA_DIR + "/eth_4h_" + MODEL_FILE;
        boosterGlobal.saveModel(modelPath);
        System.out.println("✅ 模型已保存: " + modelPath);
    }

    /**
     * Loads the global Booster from {@code DATA_DIR/eth_4h_<MODEL_FILE>}.
     * If the file does not exist, logs a warning and leaves the current
     * (possibly null) model untouched.
     */
    public static void loadModel() throws Exception {
        final String modelPath = DATA_DIR + "/eth_4h_" + MODEL_FILE;
        if (!new File(modelPath).exists()) {
            System.out.println("⚠️ 模型文件不存在，需先训练一次");
            return;
        }
        boosterGlobal = XGBoost.loadModel(modelPath);
        System.out.println("✅ 模型已加载: " + modelPath);
    }
    // 1) Canonical feature order — MUST stay in sync with buildFeatureRow.
    private static final String[] FEATURE_NAMES = new String[]{
            "macd", "volume"
    };

    /**
     * 2) Builds the training dataset (features X, labels y) in time order.
     * The final bar is dropped because it has no future close to label against.
     * Label i is 1 when bar i+1 closes above bar i, else 0.
     *
     * @param rows klines sorted oldest to newest; at least 3 required
     * @throws IllegalArgumentException when fewer than 3 bars are supplied
     */
    public static Dataset buildDataset(List<KlineWithIndicators> rows) {
        if (rows == null || rows.size() < 3) {
            throw new IllegalArgumentException("样本太少，至少需要3条K线。");
        }
        final int numSamples = rows.size() - 1; // last bar lacks close[t+1]
        final int numFeatures = FEATURE_NAMES.length;

        final float[] features = new float[numSamples * numFeatures];
        final float[] labels = new float[numSamples];

        for (int row = 0; row < numSamples; row++) {
            KlineWithIndicators current = rows.get(row);
            KlineWithIndicators next = rows.get(row + 1);

            // 1 = next bar closes higher, 0 otherwise.
            labels[row] = next.getClose() > current.getClose() ? 1f : 0f;

            // Feature layout mirrors FEATURE_NAMES.
            System.arraycopy(buildFeatureRow(current), 0, features, row * numFeatures, numFeatures);
        }
        return new Dataset(features, labels, numSamples, numFeatures);
    }

    // 3) Null-safe Number -> float conversion; nulls map to NaN so that the
    //    DMatrix missing-value marker (NaN) picks them up downstream.
    private static float toFloat(Number v) {
        if (v == null) {
            return Float.NaN;
        }
        return v.floatValue();
    }

    // 你的 buildFeatureRow 方法（Java 8 兼容）
    private static float[] buildFeatureRow(KlineWithIndicators k) {
        return new float[]{
                toFloat(k.getMacd()),
                toFloat(k.getVolume())
        };
    }

    /**
     * 4) Chronological split of a Dataset into train / valid / test DMatrices.
     * Boundaries are clamped so that every partition keeps at least one row.
     */
    public static Split splitTimeSeries(Dataset ds, double trainRatio, double validRatio) throws XGBoostError {
        final int total = ds.nRows;
        int trainEnd = (int) Math.floor(total * trainRatio);
        int validEnd = (int) Math.floor(total * (trainRatio + validRatio));

        // Boundary protection: each split must be non-empty.
        trainEnd = Math.max(1, Math.min(trainEnd, total - 2));
        validEnd = Math.max(trainEnd + 1, Math.min(validEnd, total - 1));

        return new Split(
                matrixFrom(ds, 0, trainEnd),
                matrixFrom(ds, trainEnd, validEnd),
                matrixFrom(ds, validEnd, total));
    }

    // Builds a labeled DMatrix from rows [start, end) of the dataset,
    // declaring NaN as the missing-value marker.
    private static DMatrix matrixFrom(Dataset ds, int start, int end) throws XGBoostError {
        final int cols = ds.nCols;
        final int rows = end - start;

        float[] featureSlice = Arrays.copyOfRange(ds.X, start * cols, end * cols);
        float[] labelSlice = Arrays.copyOfRange(ds.y, start, end);

        DMatrix matrix = new DMatrix(featureSlice, rows, cols, Float.NaN);
        matrix.setLabel(labelSlice);
        return matrix;
    }

    /**
     * 5) Computes scale_pos_weight = negatives / positives for class balancing.
     * Returns 1.0 when there are no positive labels (degenerate case).
     */
    public static double calcScalePosWeight(DMatrix dtrain) throws XGBoostError {
        int positives = 0;
        int total = 0;
        for (float label : dtrain.getLabel()) {
            total++;
            if (label > 0.5f) {
                positives++;
            }
        }
        if (positives == 0) {
            return 1.0;
        }
        return (total - positives) / (double) positives;
    }

    /**
     * 6) Trains a binary classifier on the enhanced feature set, watching
     * train/valid AUC, and labels each bar by whether close rises 3 bars ahead.
     *
     * NOTE(review): despite the name, NO early stopping is wired up — no
     * early-stopping callback is passed to XGBoost.train, so bestIter is
     * simply numRound. Hook up an early-stopping round parameter if genuine
     * early stopping is required.
     *
     * @param trainList training bars, oldest to newest (size must exceed the 3-bar horizon)
     * @param validList validation bars, oldest to newest (size must exceed the 3-bar horizon)
     * @param numRound  number of boosting rounds
     * @throws IllegalArgumentException when either list is too short to label
     *         (the original code threw NegativeArraySizeException instead)
     */
    public static TrainResult trainWithEarlyStop(List<KlineWithIndicators> trainList,
                                                 List<KlineWithIndicators> validList,
                                                 int numRound) throws XGBoostError {
        final int horizon = 3; // label looks 3 bars ahead

        if (trainList == null || trainList.size() <= horizon) {
            throw new IllegalArgumentException("训练集样本不足，至少需要 " + (horizon + 1) + " 条K线");
        }
        if (validList == null || validList.size() <= horizon) {
            throw new IllegalArgumentException("验证集样本不足，至少需要 " + (horizon + 1) + " 条K线");
        }

        DMatrix trainMat = buildHorizonLabeledMatrix(trainList, horizon);
        DMatrix validMat = buildHorizonLabeledMatrix(validList, horizon);

        Map<String, Object> params = new HashMap<>();
        params.put("objective", "binary:logistic");
        params.put("eta", 0.05);
        params.put("max_depth", 4);
        params.put("subsample", 0.8);
        params.put("colsample_bytree", 0.8);
        params.put("eval_metric", "auc");

        Map<String, DMatrix> watches = new HashMap<>();
        watches.put("train", trainMat);
        watches.put("valid", validMat);

        Booster booster = XGBoost.train(trainMat, params, numRound, watches, null, null);

        // No early-stopping callback is configured, so the "best" iteration is
        // just the final round.
        return new TrainResult(booster, numRound);
    }

    /**
     * Builds a labeled DMatrix from enhanced feature rows; label i is 1 when
     * close[i + horizon] > close[i]. Extracted to remove the duplicated
     * build-then-flatten code the original had for train and valid sets.
     */
    private static DMatrix buildHorizonLabeledMatrix(List<KlineWithIndicators> bars, int horizon) throws XGBoostError {
        final int rows = bars.size() - horizon;
        final int cols = buildFeatureRowEnhanced(bars.get(0)).length;

        float[] flat = new float[rows * cols];
        float[] labels = new float[rows];
        for (int i = 0; i < rows; i++) {
            System.arraycopy(buildFeatureRowEnhanced(bars.get(i)), 0, flat, i * cols, cols);
            labels[i] = bars.get(i + horizon).getClose() > bars.get(i).getClose() ? 1f : 0f;
        }

        DMatrix matrix = new DMatrix(flat, rows, cols, Float.NaN);
        matrix.setLabel(labels);
        return matrix;
    }


    /**
     * 7) Scans probability thresholds on a labeled set and returns the one
     * maximizing F1. The search range is restricted to the [1%, 99%] quantiles
     * of the predicted probabilities (estimated via a 100-bin histogram).
     *
     * @param booster    trained model
     * @param dmat       feature matrix to score
     * @param trueLabels ground-truth 0/1 labels, parallel to dmat's rows
     */
    public static double chooseBestThreshold(Booster booster, DMatrix dmat, float[] trueLabels) throws XGBoostError {
        float[][] preds = booster.predict(dmat);
        float[] probs = new float[preds.length];
        for (int i = 0; i < preds.length; i++) {
            probs[i] = preds[i][0];
        }

        // Histogram of predicted probabilities over [0, 1].
        int bins = 100;
        int[] hist = new int[bins];
        for (float p : probs) {
            int idx = Math.min((int) (p * bins), bins - 1);
            hist[idx]++;
        }

        // Walk the cumulative distribution to find the 1% / 99% quantile bounds.
        // BUGFIX: the original used `lower == 0` as its "not yet set" sentinel,
        // which mis-advanced the lower bound by one bin whenever the 1% quantile
        // fell into bin 0; a boolean flag removes the ambiguity.
        int total = probs.length;
        int cum = 0;
        double lower = 0, upper = 1;
        boolean lowerSet = false;
        for (int i = 0; i < bins; i++) {
            cum += hist[i];
            if (!lowerSet && cum >= total * 0.01) {
                lower = i / (double) bins;
                lowerSet = true;
            }
            if (cum >= total * 0.99) {
                upper = (i + 1) / (double) bins;
                break;
            }
        }
        System.out.printf("自动选择阈值搜索范围: [%.4f, %.4f]%n", lower, upper);

        // Grid-search thresholds in steps of 0.01 for the best F1.
        double bestThreshold = 0.5;
        double bestF1 = 0;
        for (double thr = lower; thr <= upper; thr += 0.01) {
            int tp = 0, fp = 0, fn = 0; // tn is not needed for F1
            for (int i = 0; i < probs.length; i++) {
                int pred = probs[i] >= thr ? 1 : 0;
                int label = (int) trueLabels[i];
                if (pred == 1 && label == 1) tp++;
                else if (pred == 1 && label == 0) fp++;
                else if (pred == 0 && label == 1) fn++;
            }
            double precision = tp + fp == 0 ? 0 : (double) tp / (tp + fp);
            double recall = tp + fn == 0 ? 0 : (double) tp / (tp + fn);
            double f1 = (precision + recall == 0) ? 0 : 2 * precision * recall / (precision + recall);
            if (f1 > bestF1) {
                bestF1 = f1;
                bestThreshold = thr;
            }
        }

        System.out.printf("最佳阈值: %.4f, F1=%.4f%n", bestThreshold, bestF1);
        return bestThreshold;
    }

    /**
     * 8) Scores the model on a labeled DMatrix and computes
     * Accuracy / Precision / Recall / F1 / AUC plus the confusion matrix.
     */
    public static Metrics evaluate(Booster booster, DMatrix dtest, int bestIter, double threshold) throws XGBoostError {
        float[] labels = dtest.getLabel();
        float[][] probabilities = booster.predict(dtest, false, bestIter);
        return binMetrics(labels, probabilities, threshold);
    }

    // Computes binary-classification metrics for predictions thresholded at `th`.
    // y holds 0/1 ground truth; pred[i][0] holds the predicted probability.
    private static Metrics binMetrics(float[] y, float[][] pred, double th) {
        final int n = y.length;
        int tp = 0, tn = 0, fp = 0, fn = 0;
        double[] prob = new double[n];

        for (int i = 0; i < n; i++) {
            prob[i] = pred[i][0];
            boolean predictedUp = prob[i] >= th;
            boolean actuallyUp = y[i] > 0.5;
            if (predictedUp) {
                if (actuallyUp) tp++; else fp++;
            } else {
                if (actuallyUp) fn++; else tn++;
            }
        }

        double precision = (tp + fp) == 0 ? 0 : tp / (double) (tp + fp);
        double recall    = (tp + fn) == 0 ? 0 : tp / (double) (tp + fn);
        double f1        = (precision + recall) == 0 ? 0 : 2 * precision * recall / (precision + recall);
        double acc       = (tp + tn) / (double) Math.max(1, n);
        double auc       = aucScore(y, prob);

        return new Metrics(acc, precision, recall, f1, auc, tp, fp, fn, tn);
    }

    /**
     * Rank-based AUC via the Mann–Whitney U statistic:
     * AUC = (R_pos - pos*(pos+1)/2) / (pos*neg), where R_pos is the sum of
     * the ranks of positive samples when scores are sorted ASCENDING.
     *
     * BUGFIX: the original sorted scores DESCENDING, which inverts the
     * statistic — a perfect classifier scored 0.0 instead of 1.0 (it returned
     * 1 - AUC). Sorting ascending restores the correct value.
     *
     * NOTE(review): tied scores still get arbitrary rather than mid-rank
     * ordering; acceptable for continuous probabilities, slightly biased
     * when many ties occur.
     */
    private static double aucScore(float[] y, double[] prob) {
        int n = y.length;
        int[] idx = IntStream.range(0, n).boxed()
                .sorted((i, j) -> Double.compare(prob[i], prob[j])) // ascending by score
                .mapToInt(Integer::intValue).toArray();

        long pos = 0, neg = 0;
        for (float v : y) if (v > 0.5f) pos++; else neg++;
        if (pos == 0 || neg == 0) return 0.5; // AUC undefined — fall back to chance

        long rankSum = 0;
        for (int rank = 1; rank <= n; rank++) {
            if (y[idx[rank - 1]] > 0.5f) rankSum += rank;
        }
        // Mann–Whitney U -> AUC
        double U = rankSum - pos * (pos + 1) / 2.0;
        return U / (pos * neg);
    }

    /**
     * 9) Single-sample inference: returns the predicted probability and the
     * thresholded 0/1 label.
     *
     * NOTE(review): this builds the 2-column buildFeatureRow layout. A booster
     * trained via trainWithEarlyStop expects the 6-column enhanced layout —
     * only call this with models trained on buildFeatureRow features.
     */
    public static Prediction predictOne(Booster booster, KlineWithIndicators k, int bestIter, double threshold) throws XGBoostError {
        float[] features = buildFeatureRow(k);
        // BUGFIX: keep NaN values as-is. The training matrices declare NaN as
        // the missing-value marker, so the original's NaN -> 0 substitution fed
        // the model a real value (0) where training saw "missing" — a
        // train/serve inconsistency.
        DMatrix dm = new DMatrix(features, 1, FEATURE_NAMES.length, Float.NaN);
        float[][] pred = booster.predict(dm, false, bestIter);
        double prob = pred[0][0];
        return new Prediction(prob, prob >= threshold ? 1 : 0);
    }

    /**
     * 10) Persists the model to {@code modelPath} and its metadata (best
     * iteration and decision threshold) as a properties file at {@code metaPath}.
     */
    public static void saveArtifacts(Booster booster, int bestIter, double threshold, String modelPath, String metaPath) throws XGBoostError, IOException {
        booster.saveModel(modelPath);

        Properties meta = new Properties();
        meta.setProperty("best_iter", Integer.toString(bestIter));
        meta.setProperty("threshold", Double.toString(threshold));
        try (FileOutputStream out = new FileOutputStream(metaPath)) {
            meta.store(out, "xgb kline artifacts");
        }
    }

    /**
     * Loads the model and metadata written by {@link #saveArtifacts}.
     * Missing metadata keys fall back to best_iter=0 and threshold=0.5.
     */
    public static Artifacts loadArtifacts(String modelPath, String metaPath) throws XGBoostError, IOException {
        Booster booster = XGBoost.loadModel(modelPath);

        Properties meta = new Properties();
        try (FileInputStream in = new FileInputStream(metaPath)) {
            meta.load(in);
        }

        return new Artifacts(
                booster,
                Integer.parseInt(meta.getProperty("best_iter", "0")),
                Double.parseDouble(meta.getProperty("threshold", "0.5")));
    }

    // ======== Data / result holder types ========
    /** Row-major flattened feature matrix plus a parallel label vector. */
    public static class Dataset {
        final float[] X;   // features, nRows * nCols, row-major
        final float[] y;   // labels, length nRows
        final int nRows;
        final int nCols;

        public Dataset(float[] X, float[] y, int nRows, int nCols) {
            this.X = X;
            this.y = y;
            this.nRows = nRows;
            this.nCols = nCols;
        }
    }
    /** Train/valid/test DMatrix triple; call {@link #close()} to free native memory. */
    public static class Split {
        final DMatrix train, valid, test;

        public Split(DMatrix train, DMatrix valid, DMatrix test) {
            this.train = train;
            this.valid = valid;
            this.test = test;
        }

        /** Disposes all three matrices (DMatrix wraps native memory). */
        public void close() throws XGBoostError {
            train.dispose();
            valid.dispose();
            test.dispose();
        }
    }
    /** A trained booster together with the iteration count considered "best". */
    public static class TrainResult {
        final Booster booster;
        final int bestIter;

        public TrainResult(Booster booster, int bestIter) {
            this.booster = booster;
            this.bestIter = bestIter;
        }
    }
    /** Binary-classification metrics plus the raw confusion-matrix counts. */
    public static class Metrics {
        public final double acc, precision, recall, f1, auc;
        public final int tp, fp, fn, tn;

        public Metrics(double acc, double precision, double recall, double f1, double auc, int tp, int fp, int fn, int tn) {
            this.acc = acc;
            this.precision = precision;
            this.recall = recall;
            this.f1 = f1;
            this.auc = auc;
            this.tp = tp;
            this.fp = fp;
            this.fn = fn;
            this.tn = tn;
        }

        @Override
        public String toString() {
            return String.format(Locale.US,
                    "ACC=%.4f  P=%.4f  R=%.4f  F1=%.4f  AUC=%.4f  [TP=%d, FP=%d, FN=%d, TN=%d]",
                    acc, precision, recall, f1, auc, tp, fp, fn, tn);
        }
    }
    /** A single inference result: raw probability plus the thresholded 0/1 label. */
    public static class Prediction {
        public final double prob;
        public final int label;

        public Prediction(double prob, int label) {
            this.prob = prob;
            this.label = label;
        }

        @Override
        public String toString() {
            return String.format(Locale.US, "prob=%.4f, label=%d", prob, label);
        }
    }
    /** Everything needed for online inference: model, best iteration, threshold. */
    public static class Artifacts {
        public final Booster booster;
        public final int bestIter;
        public final double threshold;

        public Artifacts(Booster booster, int bestIter, double threshold) {
            this.booster = booster;
            this.bestIter = bestIter;
            this.threshold = threshold;
        }
    }


    /**
     * End-to-end demo pipeline: load klines, train, pick an F1-optimal
     * threshold, report metrics on each split, persist artifacts, then roll a
     * 5-bar ahead prediction over synthetic follow-up bars.
     *
     * NOTE(review): the booster is trained on the 6-column enhanced features
     * (trainWithEarlyStop) while split.train/valid/test and predictOne use the
     * 2-column buildFeatureRow layout — the column counts disagree, so the
     * evaluation/inference calls below likely error or mis-score. The two
     * feature pipelines should be unified; flagged rather than silently changed.
     *
     * @param symbol currently unused
     * @param period currently unused
     * @return the formatted 5-bar forecast text
     */
    public static String getResultXGBoostKlinePredictor(String symbol, String period) throws Exception {
        // 1. Load the kline series (assumed sorted oldest -> newest).
        List<KlineWithIndicators> data = loadKlineData();

        // 2. Convert the klines into a feature matrix + labels.
        XgbKlinePipeline.Dataset ds = XgbKlinePipeline.buildDataset(data);

        // 3. Chronological train / valid / test split.
        //    (BUGFIX: the original never released these native matrices and
        //    also declared an unused local `DMatrix train`.)
        XgbKlinePipeline.Split split = XgbKlinePipeline.splitTimeSeries(ds, 0.70, 0.15);
        try {
            Map<String, List<KlineWithIndicators>> stringListMap = splitTimeSeriesMap(data, 0.70, 0.15);

            // 4. Train for up to 500 rounds (no true early stopping is wired up).
            XgbKlinePipeline.TrainResult tr = XgbKlinePipeline.trainWithEarlyStop(
                    stringListMap.get("trainList"), stringListMap.get("validList"), 500);

            // 5. Scan thresholds on the validation set for the F1 optimum.
            float[] validLabels = split.valid.getLabel();
            double bestTh = XgbKlinePipeline.chooseBestThreshold(tr.booster, split.valid, validLabels);

            // 6. Metrics on the training set.
            System.out.println("Train -> " + XgbKlinePipeline.evaluate(tr.booster, split.train, tr.bestIter, bestTh));

            // 7. Metrics on the validation set.
            System.out.println("Valid -> " + XgbKlinePipeline.evaluate(tr.booster, split.valid, tr.bestIter, bestTh));

            // 8. Metrics on the test set.
            System.out.println("Test  -> " + XgbKlinePipeline.evaluate(tr.booster, split.test, tr.bestIter, bestTh));

            // 9. Persist model + threshold for online inference.
            XgbKlinePipeline.saveArtifacts(tr.booster, tr.bestIter, bestTh,
                    "xgb_kline.json", "xgb_kline.meta");

            // 10. Roll-forward prediction of the next 5 bars.
            StringBuilder sb = new StringBuilder();
            List<KlineWithIndicators> rollingData = new ArrayList<>(data); // grows by one synthetic bar per step
            sb.append("未来5根K线预测结果（1=涨，0=跌）：\n");

            for (int i = 0; i < 5; i++) {
                KlineWithIndicators latest = rollingData.get(rollingData.size() - 1);

                // Predict the next bar's direction from the latest (possibly synthetic) bar.
                XgbKlinePipeline.Prediction p = XgbKlinePipeline.predictOne(tr.booster, latest, tr.bestIter, bestTh);

                // BUGFIX: the original appended this prediction TWICE per bar
                // (once here, once again after building the synthetic bar).
                sb.append(String.format("第%d根: prob=%.4f, label=%d\n", i + 1, p.prob, p.label));

                // Synthesize the next bar from the prediction (+/-0.1% close move)
                // so the rolling forecast has input for the next step; indicators
                // are simply carried forward from the latest bar.
                KlineWithIndicators nextK = new KlineWithIndicators();
                nextK.setOpen(latest.getClose());
                nextK.setHigh(latest.getClose());
                nextK.setLow(latest.getClose());
                nextK.setClose(p.label == 1 ? latest.getClose() * 1.001 : latest.getClose() * 0.999);
                nextK.setVolume(latest.getVolume());
                nextK.setEma7(latest.getEma7());
                nextK.setEma21(latest.getEma21());
                nextK.setMacd(latest.getMacd());
                nextK.setSignal(latest.getSignal());
                nextK.setHistogram(latest.getHistogram());
                nextK.setBollUpper(latest.getBollUpper());
                nextK.setBollMiddle(latest.getBollMiddle());
                nextK.setBollLower(latest.getBollLower());
                rollingData.add(nextK);
            }

            String result = sb.toString();
            System.out.println(result);
            return result;
        } finally {
            // BUGFIX: free the native memory of the split matrices.
            split.close();
        }
    }
    /**
     * Chronological three-way split of the kline series.
     * The returned lists are {@code subList} views backed by {@code data} —
     * do not structurally modify {@code data} while using them.
     *
     * @param data       klines sorted oldest to newest
     * @param trainRatio training fraction, e.g. 0.7
     * @param validRatio validation fraction, e.g. 0.15 (test gets the remainder)
     * @return map with keys "trainList", "validList", "testList"
     */
    public static Map<String, List<KlineWithIndicators>> splitTimeSeriesMap(List<KlineWithIndicators> data, double trainRatio, double validRatio) {
        int total = data.size();
        int trainEnd = (int) (total * trainRatio);
        int validEnd = trainEnd + (int) (total * validRatio);

        List<KlineWithIndicators> trainList = data.subList(0, trainEnd);
        List<KlineWithIndicators> validList = data.subList(trainEnd, validEnd);
        List<KlineWithIndicators> testList = data.subList(validEnd, total);

        System.out.println("数据切分 -> train=" + trainList.size() +
                ", valid=" + validList.size() + ", test=" + testList.size());

        Map<String, List<KlineWithIndicators>> result = new HashMap<>();
        result.put("trainList", trainList);
        result.put("validList", validList);
        result.put("testList", testList);
        return result;
    }
    // Smoke-test entry point; the symbol/period arguments are currently ignored
    // by getResultXGBoostKlinePredictor.
    public static void main(String[] args) throws Exception {
        getResultXGBoostKlinePredictor("","");
    }


    // Utility: one exponential-moving-average step using the standard
    // smoothing factor alpha = 2 / (period + 1).
    // NOTE(review): not referenced in this file — presumably used by callers
    // or intended for the hand-rolled EMA math in predictFutureNWithBacktest.
    private static double updateEma(double prevEma, double price, int period) {
        final double alpha = 2.0 / (period + 1);
        return price * alpha + prevEma * (1 - alpha);
    }

    /**
     * Tool method: iteratively predicts the next {@code predictN} bars by
     * synthesizing each following bar (+/-0.2% close move) from the model's
     * output, and runs a naive long-only backtest over the predicted path.
     *
     * Feature layout per bar: close, ema7, ema21, macd, bollUpper, bollLower
     * — the whole window is flattened into ONE row of window*6 features
     * (matching generateDMatrix, NOT buildFeatureRow/Enhanced).
     *
     * @param booster       model trained on the window*6 flattened layout
     * @param data          historical bars, oldest to newest (size >= window)
     * @param window        number of bars per feature row
     * @param predictN      number of future bars to simulate
     * @param probThreshold probability above which a bar is predicted "up"
     * @return list of predicted 0/1 labels, one per simulated bar
     */
    public static List<Integer> predictFutureNWithBacktest(Booster booster,
                                                           List<KlineWithIndicators> data,
                                                           int window,
                                                           int predictN,
                                                           double probThreshold) throws XGBoostError {

        List<KlineWithIndicators> slidingWindow = new ArrayList<>(data.subList(data.size() - window, data.size()));
        List<Integer> resultList = new ArrayList<>();
        double balance = 0.0;
        boolean holding = false;
        double entryPrice = 0.0; // close at which the current long was opened

        for (int step = 0; step < predictN; step++) {
            // === 1. Flatten the window into one feature row (6 values per bar) ===
            float[] flatFeatures = new float[window * 6];
            for (int i = 0; i < window; i++) {
                KlineWithIndicators k = slidingWindow.get(i);
                int offset = i * 6;
                flatFeatures[offset] = (float) k.getClose();
                flatFeatures[offset + 1] = k.getEma7().floatValue();
                flatFeatures[offset + 2] = k.getEma21().floatValue();
                flatFeatures[offset + 3] = k.getMacd().floatValue();
                flatFeatures[offset + 4] = k.getBollUpper().floatValue();
                flatFeatures[offset + 5] = k.getBollLower().floatValue();
            }

            // === 2./3. Build the DMatrix and predict ===
            // BUGFIX: dispose the per-iteration DMatrix — it wraps native
            // memory that the original leaked on every step.
            DMatrix dmat = new DMatrix(flatFeatures, 1, flatFeatures.length, Float.NaN);
            float prob;
            try {
                prob = booster.predict(dmat)[0][0];
            } finally {
                dmat.dispose();
            }
            int label = prob > probThreshold ? 1 : 0;
            resultList.add(label);
            System.out.println("第 " + step + " 根预测概率 = " + prob + " | 预测标签 = " + label);

            // === 4. Synthesize the next bar and roll the indicators forward ===
            double lastClose = slidingWindow.get(window - 1).getClose();
            double nextClose = lastClose * (1 + (label == 1 ? 0.002 : -0.002)); // simulate +/-0.2%

            // EMA updates (7- and 21-period smoothing factors written out).
            double lastEma7 = slidingWindow.get(window - 1).getEma7();
            double lastEma21 = slidingWindow.get(window - 1).getEma21();
            double ema7 = lastEma7 * (6.0 / 7.0) + nextClose * (1.0 / 7.0);
            double ema21 = lastEma21 * (20.0 / 21.0) + nextClose * (1.0 / 21.0);

            // MACD update.
            // NOTE(review): this smooths DEA from the previous MACD value rather
            // than a separate DEA series — an approximation, kept as-is.
            double diff = ema7 - ema21;
            double lastMacd = slidingWindow.get(window - 1).getMacd();
            double dea = lastMacd * 0.8 + diff * 0.2;
            double macd = diff - dea;

            // Bollinger bands over the trailing `window` closes.
            List<Double> closes = slidingWindow.stream().map(KlineWithIndicators::getClose).collect(Collectors.toList());
            closes.add(nextClose);
            if (closes.size() > window) closes.remove(0);
            double mean = closes.stream().mapToDouble(c -> c).average().orElse(nextClose);
            double std = Math.sqrt(closes.stream().mapToDouble(c -> Math.pow(c - mean, 2)).average().orElse(0));
            double bollUpper = mean + 2 * std;
            double bollLower = mean - 2 * std;

            KlineWithIndicators nextK = new KlineWithIndicators();
            nextK.setTimestamp(System.currentTimeMillis());
            nextK.setOpen(lastClose);
            nextK.setHigh(Math.max(lastClose, nextClose));
            nextK.setLow(Math.min(lastClose, nextClose));
            nextK.setClose(nextClose);
            nextK.setEma7(ema7);
            nextK.setEma21(ema21);
            nextK.setMacd(macd);
            nextK.setBollUpper(bollUpper);
            nextK.setBollLower(bollLower);

            // === 5. Backtest: open on BUY, settle against the ENTRY price on SELL ===
            // BUGFIX: the original credited only the final candle's move
            // ((nextClose - lastClose) / lastClose) when closing, ignoring all
            // gains/losses accrued since the position was opened.
            if (label == 1 && !holding) {
                holding = true;
                entryPrice = lastClose;
            } else if (label == 0 && holding) {
                balance += (nextClose - entryPrice) / entryPrice;
                holding = false;
            }

            // Slide the window forward by one bar.
            slidingWindow.remove(0);
            slidingWindow.add(nextK);
        }

        // Force-close any remaining position at the final simulated close,
        // again measured from the entry price (the original used only the
        // last single-candle move).
        if (holding) {
            double finalClose = slidingWindow.get(slidingWindow.size() - 1).getClose();
            balance += (finalClose - entryPrice) / entryPrice;
        }

        System.out.println("未来 " + predictN + " 根预测序列收益: " + (balance * 100) + "%");
        return resultList;
    }



    /**
     * Builds a labeled DMatrix over data[start, end) using a sliding window of
     * {@code window} bars flattened into one row (6 values per bar: min-max
     * normalized close, ema7, ema21, macd, bollUpper, bollLower).
     *
     * Sample index runs i in [start+window, end-2]; each sample's features come
     * from bars [i-window, i) and its label is 1 when the relative close change
     * from bar i to bar i+1 is >= futureThreshold.
     *
     * NOTE(review): only close is normalized — the indicator columns are fed
     * raw; presumably intentional but worth confirming. Also, the min/max are
     * computed over the WHOLE series (not just [start, end)), which leaks
     * future information into earlier samples.
     *
     * @throws IllegalArgumentException when start/end/window leave no samples
     */
    public static DMatrix generateDMatrix(List<KlineWithIndicators> data, int start, int end, int window, double futureThreshold) throws XGBoostError {
        int nSamples = end - start - window - 1;
        if (nSamples <= 0) throw new IllegalArgumentException("样本数 <= 0，请检查 start/end/window 参数");
        int nFeatures = window * 6; // close + ema7 + ema21 + macd + bollUpper + bollLower

        float[] feature1D = new float[nSamples * nFeatures];
        float[] labelArray = new float[nSamples];

        // Min-max bounds for close normalization (whole-series, see NOTE above).
        double[] closeArray = data.stream().mapToDouble(KlineWithIndicators::getClose).toArray();
        double closeMin = Arrays.stream(closeArray).min().orElse(0);
        double closeMax = Arrays.stream(closeArray).max().orElse(1);

        for (int i = start + window; i < end - 1; i++) {
            for (int j = 0; j < window; j++) {
                KlineWithIndicators k = data.get(i - window + j);
                // Row (i - start - window), bar slot j within the row.
                int baseIndex = (i - start - window) * nFeatures + j * 6;
                // 1e-8 guards against division by zero when all closes are equal.
                feature1D[baseIndex + 0] = (float)((k.getClose() - closeMin) / (closeMax - closeMin + 1e-8));
                feature1D[baseIndex + 1] = k.getEma7().floatValue();
                feature1D[baseIndex + 2] = k.getEma21().floatValue();
                feature1D[baseIndex + 3] =  k.getMacd().floatValue();
                feature1D[baseIndex + 4] =  k.getBollUpper().floatValue();
                feature1D[baseIndex + 5] =  k.getBollLower().floatValue();
            }

            // Label: next-bar relative change vs. the threshold.
            double futureChange = (data.get(i + 1).getClose() - data.get(i).getClose()) / data.get(i).getClose();
            labelArray[i - start - window] = futureChange >= futureThreshold ? 1f : 0f;
        }

        DMatrix dmatrix = new DMatrix(feature1D, nSamples, nFeatures, Float.NaN);
        dmatrix.setLabel(labelArray);
        return dmatrix;
    }
    /**
     * Converts one kline into an enhanced feature row: candle-shape ratios
     * (bar return, body/range share, shadow shares) plus MACD and volume.
     * Missing indicator values become NaN via {@link #toFloat(Number)}.
     *
     * NOTE(review): {@code (close - open) / open} divides by the raw open
     * price — an open of 0 would yield Infinity/NaN; confirm upstream data
     * never contains zero opens. (The original javadoc also declared a
     * nameless {@code @param} for a "previous kline" that this method does
     * not take — removed.)
     *
     * @param k current kline
     * @return feature array: [return, body ratio, upper-shadow ratio,
     *         lower-shadow ratio, macd, volume]
     */
    private static float[] buildFeatureRowEnhanced(KlineWithIndicators k) {
        double open = k.getOpen();
        double close = k.getClose();
        double high = k.getHigh();
        double low = k.getLow();

        double body = Math.abs(close - open);
        double range = Math.max(1e-6, high - low); // floor avoids divide-by-zero on flat bars
        double upperShadow = high - Math.max(open, close);
        double lowerShadow = Math.min(open, close) - low;

        return new float[]{
                toFloat((close - open) / open),     // bar return
                toFloat(body / range),              // body share of the full range
                toFloat(upperShadow / range),       // upper-shadow share
                toFloat(lowerShadow / range),       // lower-shadow share
                toFloat(k.getMacd()),               // MACD
                toFloat(k.getVolume())              // volume
        };
    }
    /**
     * Incrementally trains the shared global Booster.
     * The first call trains a fresh model; subsequent calls boost
     * {@code numRound} additional iterations on the new data.
     *
     * @param trainList new training bars (no-op when null/empty)
     * @param validList validation bars (only watched on the initial training)
     * @param numRound  number of boosting rounds to run
     */
    public static void incrementalTrain(List<KlineWithIndicators> trainList,
                                        List<KlineWithIndicators> validList,
                                        int numRound) throws XGBoostError {
        if (trainList == null || trainList.isEmpty()) return;

        Dataset trainDs = buildDataset(trainList);
        Dataset validDs = buildDataset(validList);

        DMatrix dTrain = matrixFrom(trainDs, 0, trainDs.nRows);
        DMatrix dValid = matrixFrom(validDs, 0, validDs.nRows);
        try {
            if (boosterGlobal != null) {
                // BUGFIX: Booster.update performs ONE boosting iteration; its
                // int argument is the iteration index, not a round count. The
                // original called update(dTrain, numRound) once, silently
                // adding a single round instead of numRound rounds.
                for (int iter = 0; iter < numRound; iter++) {
                    boosterGlobal.update(dTrain, iter);
                }
            } else {
                Map<String, Object> params = new HashMap<>();
                params.put("objective", "binary:logistic");
                params.put("eta", 0.1);
                params.put("max_depth", 6);

                Map<String, DMatrix> watches = new HashMap<>();
                watches.put("train", dTrain);
                watches.put("valid", dValid);
                boosterGlobal = XGBoost.train(dTrain, params, numRound, watches, null, null);
            }
        } finally {
            // Free the native memory held by the matrices.
            dTrain.dispose();
            dValid.dispose();
        }
    }

    // Reads the 1h ETHUSDT kline CSV from the shared data directory.
    // NOTE(review): the reader is named "Excel...Klines" but is handed a .csv
    // path — presumably it handles CSV too; verify against ExcelKlineReader.
    private static List<KlineWithIndicators> loadKlineData() throws Exception {
        return ExcelKlineReader.readExcelToKlines(DATA_DIR + "/ETHUSDT_kline_1h.csv");
    }

}
