package com.speciality.demo.demos.service.impl;

import com.speciality.demo.demos.dto.AbnormalDetail;
import com.speciality.demo.demos.dto.PredictionResult;
import com.speciality.demo.demos.dto.StatsResponse;
import com.speciality.demo.demos.entity.WeatherDaily;
import com.speciality.demo.demos.entity.WeatherHourly;
import com.speciality.demo.demos.mapper.WeatherDailyMapper;
import com.speciality.demo.demos.mapper.WeatherHourlyMapper;
import com.speciality.demo.demos.service.WeatherPredictionService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.ml.regression.RandomForestRegressionModel;
import org.apache.spark.ml.regression.RandomForestRegressor;
import org.apache.spark.sql.*;
import org.apache.spark.ml.regression.LinearRegression;
import org.apache.spark.ml.regression.LinearRegressionModel;
import org.apache.spark.ml.feature.VectorAssembler;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.springframework.stereotype.Service;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;

import static org.apache.spark.sql.functions.*;

@Slf4j
@Service
@RequiredArgsConstructor
public class WeatherPredictionServiceImpl implements WeatherPredictionService {
    private final WeatherDailyMapper weatherDailyMapper;
    private volatile SparkSession sparkSession;  // volatile guarantees cross-thread visibility (lazily built via double-checked locking)

    // Shared formatter for all date-time strings emitted by this service (DateTimeFormatter is thread-safe).
    private static final DateTimeFormatter TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    // Meteorological alert thresholds (tune per region as needed).
    private static final double HIGH_TEMP_THRESHOLD = 35.0;  // high-temperature alert threshold
    private static final double LOW_TEMP_THRESHOLD = -5.0;   // low-temperature alert threshold
    private static final double HIGH_WIND_THRESHOLD = 15.0;  // strong-wind alert threshold
    private static final double HIGH_PRECIP_THRESHOLD = 50.0;// heavy-rain alert threshold
    // RMSE of the most recently trained model, read by confidence scoring.
    // NOTE(review): mutable instance state in a singleton Spring bean — concurrent prediction
    // requests can overwrite each other's value; consider passing it per call. TODO confirm.
    private double modelRMSE;  // model root-mean-squared error (feeds confidence calculation)

    /**
     * Predicts weather for the seven days immediately after the given date.
     */
    @Override
    public List<PredictionResult> getWeeklyPrediction(LocalDate inputDate) {
        log.info("获取下一周预测数据，输入日期: {}", inputDate);
        // The "week" runs from the day after the input date through day +7.
        LocalDate from = inputDate.plusDays(1);
        LocalDate to = inputDate.plusDays(7);
        return analyzeWithSpark(from, to);
    }

    /**
     * Computes abnormality statistics over real historical data in [startDate, endDate]:
     * average daily max temperature, abnormal-day count/rate, and per-day details.
     */
    @Override
    public StatsResponse getPredictionStats(LocalDate startDate,LocalDate endDate) {
        log.info("获取指定时间范围的异常统计，时间：{} ~ {}", startDate, endDate);

        // Pull the real historical rows for the range straight from the database.
        List<WeatherDaily> rows = weatherDailyMapper.selectByDateRange(startDate, endDate);
        if (rows.isEmpty()) {
            // No data: return an all-zero response rather than null.
            return StatsResponse.builder()
                    .avgTemperature(0.0)
                    .abnormalCount(0)
                    .abnormalRate(0.0)
                    .abnormalDetails(Collections.emptyList())
                    .build();
        }

        List<AbnormalDetail> details = new ArrayList<>();
        long anomalies = 0;
        double tempSum = 0.0;
        final int total = rows.size();

        for (WeatherDaily day : rows) {
            // Fields expected by judgeAbnormality; the daily MAX temperature drives the average.
            double maxTemp = day.getTemperature2mMax();
            double minTemp = day.getTemperature2mMin();
            double wind = day.getWindspeed10mMax();
            double rain = day.getPrecipitationSum();
            double snow = day.getSnowfallSum();

            // Scratch object used only to collect the abnormality verdict.
            PredictionResult verdict = new PredictionResult();
            judgeAbnormality(verdict, maxTemp, minTemp, wind, rain, snow);

            tempSum += maxTemp;
            if (verdict.getIsAbnormal() == 1) {
                anomalies++;
                // Record the detail with the real date and a noon timestamp string.
                details.add(AbnormalDetail.builder()
                        .date(day.getDate())
                        .time(day.getDate().atTime(LocalTime.NOON).format(TIME_FORMATTER))
                        .reason(verdict.getAnomalyReason())
                        .temperature(maxTemp)
                        .riskLevel(verdict.getRiskLevel())
                        .build());
            }
        }

        // Aggregate metrics; total > 0 is guaranteed here but the guards keep the math safe.
        double avgTemperature = total > 0 ? tempSum / total : 0.0;
        double abnormalRate = total > 0 ? (double) anomalies / total : 0.0;

        return StatsResponse.builder()
                .avgTemperature(avgTemperature)
                .abnormalCount((int) anomalies)
                .abnormalRate(abnormalRate)
                .abnormalDetails(details)
                .build();
    }

    /**
     * Predicts daily weather for the given range with a Spark linear-regression model.
     * Trains on the same calendar window one year earlier; falls back to a simple
     * historical-average scheme when no training data exists or Spark fails.
     */
    @Override
    public List<PredictionResult> analyzeWithSpark(LocalDate startDate, LocalDate endDate) {
        log.info("使用Spark线性回归模型预测，时间范围: {} - {}", startDate, endDate);

        try {
            initializeSpark();

            // Same window last year keeps the season comparable.
            LocalDate histStart = startDate.minusYears(1);
            LocalDate histEnd = endDate.minusYears(1);
            List<WeatherDaily> history = weatherDailyMapper.selectByDateRange(histStart, histEnd);
            if (history.isEmpty()) {
                log.warn("历史数据为空（{}至{}），使用降级预测", histStart, histEnd);
                return generateFallbackPredictions(startDate, endDate);
            }

            // Build the feature-enriched DataFrame, fit the model, then predict.
            Dataset<Row> dailyDF = createEnhancedDailyDataFrame(history);
            LinearRegressionModel model = trainLinearRegressionModel(dailyDF);
            return generateRealPredictions(model, startDate, endDate, dailyDF);
        } catch (Exception e) {
            // Any Spark/ML failure degrades gracefully instead of surfacing to the caller.
            log.error("Spark预测失败，使用降级方案", e);
            return generateFallbackPredictions(startDate, endDate);
        }
    }

    /**
     * Lazily creates the shared SparkSession using double-checked locking; the
     * field is volatile so a fully-constructed session is visible to all threads.
     */
    private void initializeSpark() {
        if (sparkSession != null) {
            return;  // fast path: already initialized, no lock needed
        }
        synchronized (this) {
            if (sparkSession == null) {  // re-check inside the lock
                sparkSession = SparkSession.builder()
                        .appName("WeatherPrediction-LinearRegression")
                        .master("local[*]")
                        .config("spark.driver.memory", "4g")
                        .config("spark.sql.adaptive.enabled", "true")
                        .getOrCreate();
                log.info("Spark会话初始化完成");
            }
        }
    }

    /**
     * Converts WeatherDaily rows into a Spark DataFrame and derives extra features:
     * calendar month, weekend flag, and daily temperature range. Null rows are
     * dropped so model training does not fail.
     *
     * Fix: Spark's dayofweek() returns 1=Sunday … 7=Saturday, so the previous
     * "is_weekend" test (dayofweek >= 6) actually flagged Friday/Saturday. It now
     * flags Saturday/Sunday (values 7 and 1), which matches the prediction-side
     * weekend flag computed with java.time (getValue() >= 6 means Sat/Sun).
     */
    private Dataset<Row> createEnhancedDailyDataFrame(List<WeatherDaily> data) {
        List<Row> rows = data.stream().map(daily -> RowFactory.create(
                daily.getDate().toString(),
                daily.getTemperature2mMax(),
                daily.getTemperature2mMin(),
                daily.getPrecipitationSum(),
                daily.getWindspeed10mMax(),
                daily.getSnowfallSum()
        )).collect(Collectors.toList());

        StructType baseSchema = new StructType(new StructField[]{
                new StructField("date", DataTypes.StringType, false, Metadata.empty()),
                new StructField("temp_max", DataTypes.DoubleType, true, Metadata.empty()),  // prediction target
                new StructField("temp_min", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("precipitation", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("wind_speed", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("snowfall", DataTypes.DoubleType, true, Metadata.empty())
        });

        return sparkSession.createDataFrame(rows, baseSchema)
                .withColumn("month", month(to_date(col("date"))))  // calendar month (1-12)
                .withColumn("is_weekend", when(
                        dayofweek(to_date(col("date"))).equalTo(7)
                                .or(dayofweek(to_date(col("date"))).equalTo(1)), 1
                ).otherwise(0))  // Spark dayofweek: 1=Sunday, 7=Saturday -> weekend
                .withColumn("temp_range", col("temp_max").minus(col("temp_min")))  // daily temperature spread
                .na().drop();  // drop null rows to avoid training failures
    }

    /**
     * Trains a linear regression for daily max temperature on an 80/20 train/test
     * split and stores the held-out RMSE for later confidence scoring.
     *
     * The feature column order must stay identical to the assembler used at
     * prediction time in generateRealPredictions.
     */
    private LinearRegressionModel trainLinearRegressionModel(Dataset<Row> dailyDF) {
        // Assemble features (must stay in sync with the prediction-side assembler)
        VectorAssembler assembler = new VectorAssembler()
                .setInputCols(new String[]{"temp_min", "precipitation", "wind_speed", "month", "is_weekend", "temp_range"})
                .setOutputCol("features");
        Dataset<Row> featureDF = assembler.transform(dailyDF).select("features", "temp_max").na().drop();

        // 80/20 split with a fixed seed so runs are reproducible
        Dataset<Row>[] splits = featureDF.randomSplit(new double[]{0.8, 0.2}, 42);
        Dataset<Row> trainDF = splits[0];
        Dataset<Row> testDF = splits[1];

        // Regularized linear regression (guards against overfitting)
        LinearRegression lr = new LinearRegression()
                .setLabelCol("temp_max")
                .setFeaturesCol("features")
                .setMaxIter(100)
                .setRegParam(0.1)  // regularization strength
                .setElasticNetParam(0.5);  // 0.5 = equal mix of L1 and L2 penalties

        // Fit the model
        LinearRegressionModel model = lr.fit(trainDF);

        // Evaluate on both splits; the held-out (test) RMSE feeds confidence scoring
        double trainRMSE = calculateRMSE(model.transform(trainDF), "temp_max", "prediction");
        double testRMSE = calculateRMSE(model.transform(testDF), "temp_max", "prediction");
        log.info("线性回归模型 - 训练集RMSE: {}, 测试集RMSE: {}", trainRMSE, testRMSE);
        // NOTE(review): writes shared instance state in a singleton bean — see field comment.
        this.modelRMSE = testRMSE;  // use held-out error as the confidence basis

        log.info("线性回归系数: {}", Arrays.toString(model.coefficients().toArray()));
        log.info("线性回归截距: {}", model.intercept());

        return model;
    }

    /**
     * Root-mean-squared error between a label column and a prediction column.
     */
    private double calculateRMSE(Dataset<Row> predictions, String labelCol, String predictionCol) {
        // Mean of squared residuals, then square root.
        Column squaredError = pow(col(labelCol).minus(col(predictionCol)), 2);
        double mse = predictions
                .withColumn("squared_error", squaredError)
                .agg(avg("squared_error"))
                .first()
                .getDouble(0);
        return Math.sqrt(mse);
    }

    /**
     * Runs the trained model over synthesized future features and maps each output
     * row to a PredictionResult (temperature is the model output; humidity and
     * pressure are derived heuristically).
     *
     * Improvement: all row columns are now resolved by name via fieldIndex instead
     * of hard-coded positions, so the mapping survives any schema reordering (the
     * original positional getDouble(1..4) reads were fragile).
     */
    private List<PredictionResult> generateRealPredictions(
            LinearRegressionModel model,
            LocalDate startDate,
            LocalDate endDate,
            Dataset<Row> historicalDF) {

        // Synthesize feature rows for the future dates.
        Dataset<Row> futureFeatures = generateFutureFeatures(startDate, endDate, historicalDF);

        // Assemble the exact feature vector used at training time (snowfall excluded).
        VectorAssembler assembler = new VectorAssembler()
                .setInputCols(new String[]{"temp_min", "precipitation", "wind_speed", "month", "is_weekend", "temp_range"})
                .setOutputCol("features");
        Dataset<Row> featureDF = assembler.transform(futureFeatures).na().drop();

        // Inference adds a "prediction" column to the DataFrame.
        Dataset<Row> predictionDF = model.transform(featureDF);

        return predictionDF.collectAsList().stream().map(row -> {
            // Resolve every column by name — robust against column reordering.
            LocalDate date = LocalDate.parse(row.getString(row.fieldIndex("date")));
            double predictedMaxTemp = row.getDouble(row.fieldIndex("prediction"));
            double minTemp = row.getDouble(row.fieldIndex("temp_min"));
            double precipitation = row.getDouble(row.fieldIndex("precipitation"));
            double windSpeed = row.getDouble(row.fieldIndex("wind_speed"));
            double snowfall = row.getDouble(row.fieldIndex("snowfall"));  // anomaly checks only, not a model input

            PredictionResult result = new PredictionResult();
            result.setTime(date.atTime(LocalTime.NOON).format(TIME_FORMATTER));
            result.setTemperature(round(predictedMaxTemp, 1));
            result.setHumidity(calculateHumidity(predictedMaxTemp, precipitation));
            result.setWindSpeed(round(windSpeed, 1));
            result.setPressure(calculatePressure(predictedMaxTemp));
            result.setSnowfall(round(snowfall, 1));

            // Flag abnormal weather and attach reason / risk level.
            judgeAbnormality(result, predictedMaxTemp, minTemp, windSpeed, precipitation, snowfall);

            // Confidence blends feature similarity to history with model RMSE.
            result.setConfidence(calculateDynamicConfidence(row, historicalDF));
            log.info("预测结果:{}",result);
            return result;
        }).collect(Collectors.toList());
    }

    /**
     * Builds synthetic feature rows for every date in [startDate, endDate].
     *
     * Each feature is sampled around historical aggregates (mean plus jitter scaled
     * by the standard deviation), with precipitation gated by the historical
     * per-weekday precipitation probability. Uses Math.random(), so the output is
     * nondeterministic between calls.
     */
    private Dataset<Row> generateFutureFeatures(LocalDate startDate, LocalDate endDate, Dataset<Row> historicalDF) {
        // Historical aggregates used as sampling parameters
        Row histStats = historicalDF.agg(
                avg("temp_min").alias("avg_temp_min"),
                stddev("temp_min").alias("std_temp_min"),
                avg("precipitation").alias("avg_precip"),
                stddev("precipitation").alias("std_precip"),
                avg("wind_speed").alias("avg_wind"),
                stddev("wind_speed").alias("std_wind"),
                percentile_approx(col("precipitation"), lit(0.9), lit(1)).alias("p90_precip"),
                avg("temp_range").alias("avg_global_temp_range")  // global mean temperature range (null fallback)
        ).first();

        // Extract statistics, falling back to hard-coded defaults on null/NaN
        double avgMinTemp = safeGetDouble(histStats, 0, 15.0);
        double stdMinTemp = safeGetDouble(histStats, 1, 5.0);
        double avgPrecip = safeGetDouble(histStats, 2, 10.0);
        double stdPrecip = safeGetDouble(histStats, 3, 8.0);
        double avgWind = safeGetDouble(histStats, 4, 5.0);
        double stdWind = safeGetDouble(histStats, 5, 3.0);
        double p90Precip = safeGetDouble(histStats, 6, 30.0);
        double globalAvgTempRange = safeGetDouble(histStats, 7, 10.0);

        // Per-weekday precipitation probability learned from history.
        // NOTE(review): keys are looked up below with getDayOfWeek().getValue()
        // (ISO numbering, 1=Monday); confirm the producer uses the same convention.
        Map<Integer, Double> dayOfWeekPrecipProb = calculateHistoricalDayOfWeekPattern(historicalDF);

        // Build one feature row per future date
        List<Row> futureRows = new ArrayList<>();
        LocalDate currentDate = startDate;
        while (!currentDate.isAfter(endDate)) {
            int dayOfWeek = currentDate.getDayOfWeek().getValue();
            int month = currentDate.getMonthValue();
            int isWeekend = (dayOfWeek >= 6) ? 1 : 0;

            // Sample temp_min around the historical mean, clamped to +/- 2 std devs
            double tempMin = avgMinTemp + (Math.random() * 3 - 1.5) * stdMinTemp;
            tempMin = Math.max(avgMinTemp - 2 * stdMinTemp, Math.min(avgMinTemp + 2 * stdMinTemp, tempMin));

            // Sample precipitation, gated by the weekday precipitation probability
            double precipProb = dayOfWeekPrecipProb.getOrDefault(dayOfWeek, 0.3);
            double precipitation = Math.random() < precipProb
                    ? Math.min(avgPrecip + (Math.random() * 2 - 1) * stdPrecip, p90Precip * 1.2)
                    : 0;
            precipitation = Math.max(0, precipitation);

            // Sample wind speed, clamped to [0, mean + 3 std devs]
            double windSpeed = avgWind + (Math.random() * 3 - 1.5) * stdWind;
            windSpeed = Math.max(0, Math.min(avgWind + 3 * stdWind, windSpeed));

            // Temperature range conditioned on temp_min (global default as fallback)
            double tempRange = calculateTempRange(tempMin, historicalDF, globalAvgTempRange);

            futureRows.add(RowFactory.create(
                    currentDate.toString(), tempMin, precipitation, windSpeed,
                    0.0,  // snowfall: not a model input, used only by anomaly checks
                    month, isWeekend, tempRange
            ));
            currentDate = currentDate.plusDays(1);
        }

        // Schema of the future-feature dataset
        StructType schema = new StructType(new StructField[]{
                new StructField("date", DataTypes.StringType, false, Metadata.empty()),
                new StructField("temp_min", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("precipitation", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("wind_speed", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("snowfall", DataTypes.DoubleType, true, Metadata.empty()),
                new StructField("month", DataTypes.IntegerType, true, Metadata.empty()),
                new StructField("is_weekend", DataTypes.IntegerType, true, Metadata.empty()),
                new StructField("temp_range", DataTypes.DoubleType, true, Metadata.empty())
        });

        return sparkSession.createDataFrame(futureRows, schema);
    }

    /**
     * Average historical temp_range over days whose temp_min lies within +/- 2 of
     * the given value; returns the supplied global default when nothing matches
     * (the average is null in that case and na().fill substitutes the default).
     */
    private double calculateTempRange(double tempMin, Dataset<Row> historicalDF, double globalDefault) {
        Row avgRow = historicalDF
                .where(col("temp_min").between(tempMin - 2, tempMin + 2))
                .agg(avg("temp_range").alias("avg_range"))
                .na().fill(globalDefault)
                .first();
        return avgRow.getDouble(0);
    }

    /**
     * Estimates the probability of precipitation for each day of the week from
     * historical data. Keys use ISO day-of-week numbering (1=Monday … 7=Sunday) to
     * match the lookups performed with java.time getDayOfWeek().getValue().
     *
     * Fix: the map was previously keyed by Spark's dayofweek() values (1=Sunday …
     * 7=Saturday) while the only caller looks entries up with ISO values, so every
     * lookup hit the wrong weekday (or missed). Keys are now converted to ISO.
     */
    private Map<Integer, Double> calculateHistoricalDayOfWeekPattern(Dataset<Row> historicalDF) {
        Dataset<Row> dayOfWeekStats = historicalDF
                .withColumn("day_of_week", dayofweek(to_date(col("date"))))
                .withColumn("has_precip", col("precipitation").gt(0).cast(DataTypes.IntegerType))
                .groupBy("day_of_week")
                .agg(
                        avg("has_precip").alias("precip_prob"),
                        count("*").alias("total_days")
                )
                .na().fill(0.3, new String[]{"precip_prob"});  // default 30% precipitation probability

        Map<Integer, Double> pattern = new HashMap<>();
        for (Row row : dayOfWeekStats.collectAsList()) {
            int sparkDow = row.getInt(0);                    // Spark: 1=Sunday … 7=Saturday
            int isoDow = (sparkDow == 1) ? 7 : sparkDow - 1; // ISO:   1=Monday … 7=Sunday
            pattern.put(isoDow, row.getDouble(1));
        }
        return pattern;
    }

    /**
     * Heuristic relative humidity: precipitation pushes it up, heat pulls it down;
     * result is clamped to [30, 90] and rounded to one decimal place.
     */
    private double calculateHumidity(double temp, double precipitation) {
        double raw = 60 + precipitation * 0.8 - temp * 0.5;
        double clamped = Math.min(90, Math.max(30, raw));
        return round(clamped, 1);
    }

    /**
     * Heuristic pressure: decreases slightly with temperature; clamped to
     * [980, 1040] and rounded to one decimal place.
     */
    private double calculatePressure(double temp) {
        double raw = 1013 - temp * 0.3;
        double clamped = Math.min(1040, Math.max(980, raw));
        return round(clamped, 1);
    }

    /**
     * Flags abnormal weather on the result and builds a human-readable reason list.
     * Risk escalates to "high" when more than one alert fires simultaneously.
     *
     * Fix: the high-temperature branch previously tested {@code minTemp} against
     * HIGH_TEMP_THRESHOLD (while formatting {@code temp} into the message), so a
     * genuine high-temperature day (max above 35) was never flagged unless its
     * MINIMUM exceeded 35. It now tests {@code temp}, consistent with the message
     * and with the low-temperature branch.
     *
     * @param result   prediction to annotate (isAbnormal / riskLevel / anomalyReason)
     * @param temp     daily maximum temperature
     * @param minTemp  daily minimum temperature (retained for signature compatibility)
     */
    private void judgeAbnormality(
            PredictionResult result,
            double temp,
            double minTemp,
            double windSpeed,
            double precipitation,
            double snowfall) {

        List<String> reasons = new ArrayList<>();

        // High/low temperature alerts are mutually exclusive, hence the else-if.
        if (temp > HIGH_TEMP_THRESHOLD) {
            reasons.add(String.format("高温预警（%.1f℃ > %.1f℃）", temp, HIGH_TEMP_THRESHOLD));
        } else if (temp < LOW_TEMP_THRESHOLD) {
            reasons.add(String.format("低温预警（%.1f℃ < %.1f℃）", temp, LOW_TEMP_THRESHOLD));
        }

        if (windSpeed > HIGH_WIND_THRESHOLD) {
            reasons.add(String.format("大风预警（%.1fm/s > %.1fm/s）", windSpeed, HIGH_WIND_THRESHOLD));
        }

        if (precipitation > HIGH_PRECIP_THRESHOLD) {
            reasons.add(String.format("暴雨预警（%.1fmm > %.1fmm）", precipitation, HIGH_PRECIP_THRESHOLD));
        }

        if (snowfall > 10) {
            reasons.add(String.format("暴雪预警（%.1fmm > 10mm）", snowfall));
        }

        if (reasons.isEmpty()) {
            result.setIsAbnormal(0);
            result.setRiskLevel("low");
            result.setAnomalyReason("气象条件正常");
        } else {
            result.setIsAbnormal(1);
            // Multiple simultaneous alerts escalate the risk level.
            result.setRiskLevel(reasons.size() > 1 ? "high" : "medium");
            result.setAnomalyReason(String.join("；", reasons));
        }
    }

    /**
     * Reads a double from a Spark Row, substituting a default for SQL null or NaN.
     */
    private double safeGetDouble(Row row, int index, double defaultValue) {
        if (row.isNullAt(index)) {
            return defaultValue;  // SQL NULL in the aggregate
        }
        double v = row.getDouble(index);
        if (Double.isNaN(v)) {
            return defaultValue;  // e.g. stddev over a single row
        }
        return v;
    }

    /**
     * Rounds a value to the given number of decimal places via scaled Math.round.
     */
    private double round(double value, int decimalPlaces) {
        double factor = Math.pow(10, decimalPlaces);
        return Math.round(value * factor) / factor;
    }

    /**
     * Fallback used when the Spark pipeline fails: predicts each day from the last
     * 30 days' average max temperature plus small random jitter, with benign
     * defaults for all other fields and a reduced confidence score.
     */
    private List<PredictionResult> generateFallbackPredictions(LocalDate startDate, LocalDate endDate) {
        log.info("执行降级预测，时间范围: {} - {}", startDate, endDate);

        // Baseline = mean daily max over the 30 days preceding the window (default 20 °C).
        double baselineTemp = weatherDailyMapper
                .selectByDateRange(startDate.minusDays(30), startDate.minusDays(1))
                .stream()
                .mapToDouble(WeatherDaily::getTemperature2mMax)
                .average()
                .orElse(20.0);

        List<PredictionResult> predictions = new ArrayList<>();
        for (LocalDate day = startDate; !day.isAfter(endDate); day = day.plusDays(1)) {
            PredictionResult result = new PredictionResult();
            result.setTime(day.atTime(LocalTime.NOON).format(TIME_FORMATTER));
            result.setTemperature(round(baselineTemp + (Math.random() * 4 - 2), 1));  // baseline ±2 °C jitter
            result.setHumidity(round(60 + (Math.random() * 20 - 10), 1));
            result.setWindSpeed(round(5 + (Math.random() * 4 - 2), 1));
            result.setPressure(round(1013 + (Math.random() * 10 - 5), 1));
            result.setSnowfall(0.0);
            result.setIsAbnormal(0);
            result.setRiskLevel("low");
            result.setAnomalyReason("降级预测：气象条件正常");
            result.setConfidence(0.6);  // degraded path gets lower confidence
            predictions.add(result);
        }
        return predictions;
    }
    /**
     * Dynamic confidence for one prediction, combining:
     * 1. how similar the feature row is to same-month historical samples
     *    (closer features -> higher confidence), and
     * 2. the trained model's held-out RMSE (smaller error -> higher confidence).
     */
    private double calculateDynamicConfidence(Row currentRow, Dataset<Row> historicalDF) {
        // 1. Pull the key features from the prediction row (matches model inputs)
        double currentTempMin = currentRow.getDouble(currentRow.fieldIndex("temp_min"));
        double currentPrecip = currentRow.getDouble(currentRow.fieldIndex("precipitation"));
        double currentWind = currentRow.getDouble(currentRow.fieldIndex("wind_speed"));
        int currentMonth = currentRow.getInt(currentRow.fieldIndex("month"));

        // 2. Similarity against history, restricted to the same calendar month
        //    (seasonal correlation is stronger within a month)
        Dataset<Row> sameMonthHist = historicalDF
                .where(col("month").equalTo(currentMonth))
                .select("temp_min", "precipitation", "wind_speed");

        if (sameMonthHist.isEmpty()) {
            // No same-month history: fall back to a neutral similarity of 0.5
            return calculateModelBasedConfidence(0.5);
        }

        // Euclidean distance on features scaled by fixed units so differing
        // magnitudes do not dominate (5 °C, 10 mm, 3 m/s per unit)
        Dataset<Row> distanceDF = sameMonthHist
                .withColumn("temp_min_norm", (col("temp_min").minus(currentTempMin)).divide(5)) // temperature scaled by 5 °C
                .withColumn("precip_norm", (col("precipitation").minus(currentPrecip)).divide(10)) // precipitation scaled by 10 mm
                .withColumn("wind_norm", (col("wind_speed").minus(currentWind)).divide(3)) // wind scaled by 3 m/s
                .withColumn("euclidean_dist", sqrt(
                        pow(col("temp_min_norm"), 2)
                                .plus(pow(col("precip_norm"), 2))
                                .plus(pow(col("wind_norm"), 2))
                ));

        // Closest historical sample = smallest distance
        double minDistance = distanceDF.agg(min("euclidean_dist")).first().getDouble(0);

        // Map distance to similarity in (0, 1]: smaller distance -> higher similarity
        double similarity = 1.0 / (1.0 + minDistance);


        // 3. Blend the similarity with the model-error based weighting
        return calculateModelBasedConfidence(similarity);
    }

    /**
     * Blends feature similarity with model quality: a small test RMSE gives the
     * similarity term more weight; otherwise the score leans on a 0.6 baseline.
     * The result is clamped to [0.3, 0.95] to avoid extreme values.
     */
    private double calculateModelBasedConfidence(double similarity) {
        final double idealRMSE = 5.0;       // reference "good enough" error (business-tunable)
        final double baseConfidence = 0.6;  // baseline that anchors the blend

        // Weight in [0.3, 1.0]: shrinks as the observed RMSE grows past the ideal
        // (0.1 floor on modelRMSE guards against division blow-up).
        double modelWeight = Math.max(0.3, Math.min(1.0, idealRMSE / Math.max(modelRMSE, 0.1)));

        // Weighted blend of similarity and the baseline.
        double blended = similarity * modelWeight + baseConfidence * (1 - modelWeight);
        return Math.max(0.3, Math.min(0.95, blended));
    }
}
