package com.tzg157.fitness.service;

import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.ForeachPartitionFunction;
import org.apache.spark.ml.Pipeline;
import org.apache.spark.ml.PipelineModel;
import org.apache.spark.ml.PipelineStage;
import org.apache.spark.ml.evaluation.RegressionEvaluator;
import org.apache.spark.ml.feature.StandardScaler;
import org.apache.spark.ml.feature.VectorAssembler;
import org.apache.spark.ml.recommendation.ALSModel;
import org.apache.spark.mllib.tree.model.RandomForestModel;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.springframework.stereotype.Service;

import java.util.*;
import java.util.stream.Collectors;

import static org.apache.spark.sql.types.DataTypes.DoubleType;
import static org.apache.spark.sql.types.DataTypes.IntegerType;

@Service
public class RecommenderServiceImpl {

    /** On-disk location of the trained ALS collaborative-filtering model. */
    private static final String ALS_MODEL_PATH = "/opt/hive/models/als_model";
    /** Base directory of the per-target random-forest pipeline models. */
    private static final String RF_MODEL_BASE = "/opt/hive/models/random_forest/";

    /**
     * Builds a local, Hive-enabled SparkSession.
     *
     * NOTE(review): master ("local[4]") and the metastore URI are hard-coded;
     * consider externalizing both to application configuration. The original
     * code routed the SparkConf through an unused (never started, never
     * stopped) JavaStreamingContext — no streaming API was involved, so the
     * conf is now applied directly.
     *
     * @param logLevel Spark log level for this job (e.g. "INFO", "WARN")
     */
    private static SparkSession createSparkSession(String logLevel) {
        SparkConf conf = new SparkConf()
                .setAppName("HiveLoadDataServiceImpl")
                .setMaster("local[4]");
        SparkSession spark = SparkSession.builder()
                .config(conf)
                .enableHiveSupport()
                .config("hive.metastore.uris", "thrift://localhost:9083")
                .getOrCreate();
        spark.sparkContext().setLogLevel(logLevel);
        return spark;
    }

    /**
     * Returns up to 10 recommended item ids for the given user via the saved
     * ALS model.
     *
     * NOTE(review): the subset dataset's first column is named "uid" — this
     * must match the userCol the ALS model was trained with; verify against
     * the training job.
     *
     * @param userId   user to recommend for
     * @param courseId course id placed in the second column of the subset row
     *                 (present for schema compatibility; ALS only reads the
     *                 user column)
     * @return recommended ids, widened from the int item ids ALS stores
     */
    public List<Long> getRecommenderData(Long userId, Long courseId) {
        SparkSession spark = createSparkSession("INFO");
        try {
            ALSModel model = ALSModel.load(ALS_MODEL_PATH);
            System.out.println("模型已加载: " + ALS_MODEL_PATH);

            StructType schema = new StructType(new StructField[]{
                    DataTypes.createStructField("uid", DataTypes.LongType, false),
                    DataTypes.createStructField("cid", DataTypes.LongType, false)
            });
            Dataset<Row> userSubset = spark.createDataFrame(
                    Collections.singletonList(RowFactory.create(userId, courseId)),
                    schema);

            Dataset<Row> userRecs = model.recommendForUserSubset(userSubset, 10);
            userRecs.show(false);

            List<Long> retList = new ArrayList<>();
            for (Row r : userRecs.collectAsList()) {
                // Column 1 is the "recommendations" array of struct(id, rating).
                List<Row> recommendations = r.getList(1);
                for (Row rec : recommendations) {
                    retList.add((long) rec.getInt(0)); // ALS item ids are ints
                }
            }
            return retList;
        } finally {
            // Previously leaked when load/transform threw before spark.stop().
            spark.stop();
        }
    }

    /**
     * Predicts a workout plan (calories, session duration, weekly frequency,
     * heart rate, workout type) from the user's profile using five saved
     * random-forest pipeline models.
     *
     * Bug fix: the original hard-coded 0 for every feature beyond height,
     * silently discarding values the caller supplied (main() passes
     * experience_level=1, for example). Defaults below match the old
     * constants, so callers that omit a key see the previous behavior.
     *
     * @param data profile features keyed by schema field name; missing keys
     *             default to 0 / 0.0
     * @return predictions keyed by target name
     */
    public Map<String, Number> getPlanRecommenderData(Map<String, Number> data) {
        System.setProperty("HADOOP_USER_NAME", "root");
        SparkSession spark = createSparkSession("WARN");
        try {
            StructType schema = new StructType(new StructField[]{
                    DataTypes.createStructField("age", IntegerType, true),
                    DataTypes.createStructField("gender", IntegerType, true),
                    DataTypes.createStructField("weight", DoubleType, true),
                    DataTypes.createStructField("height", DoubleType, true),
                    DataTypes.createStructField("max_bpm", IntegerType, true),
                    DataTypes.createStructField("avg_bpm", IntegerType, true),
                    DataTypes.createStructField("resting_bpm", IntegerType, true),
                    DataTypes.createStructField("session_duration", DoubleType, true),
                    DataTypes.createStructField("fat_percentage", DoubleType, true),
                    DataTypes.createStructField("water_intake", DoubleType, true),
                    DataTypes.createStructField("workout_frequency", IntegerType, true),
                    DataTypes.createStructField("experience_level", IntegerType, true),
                    DataTypes.createStructField("bmi", DoubleType, true)
            });

            // Coerce through Number so a caller storing e.g. Integer 75 for a
            // DoubleType field does not blow up inside Spark's row encoder.
            Dataset<Row> testDataPoint = spark.createDataFrame(
                    Collections.singletonList(RowFactory.create(
                            intOf(data, "age", 0),
                            intOf(data, "gender", 0),
                            doubleOf(data, "weight", 0.0),
                            doubleOf(data, "height", 0.0),
                            intOf(data, "max_bpm", 0),
                            intOf(data, "avg_bpm", 0),
                            intOf(data, "resting_bpm", 0),
                            doubleOf(data, "session_duration", 0.0),
                            doubleOf(data, "fat_percentage", 0.0),
                            doubleOf(data, "water_intake", 0.0),
                            intOf(data, "workout_frequency", 0),
                            intOf(data, "experience_level", 0),
                            doubleOf(data, "bmi", 0.0))),
                    schema);

            // Target model name -> console label (insertion order preserved).
            Map<String, String> targets = new LinkedHashMap<>();
            targets.put("calories", "预测卡路里消耗: ");
            targets.put("duration", "预测每次训练时长: ");
            targets.put("frequency", "预测每周训练次数: ");
            targets.put("heartRate", "预测锻炼期间心率: ");
            targets.put("workoutType", "预测健身方法推荐: ");

            Map<String, Number> retMap = new HashMap<>();
            for (Map.Entry<String, String> target : targets.entrySet()) {
                double value = predict(target.getKey(), testDataPoint);
                System.out.println(target.getValue() + value);
                retMap.put(target.getKey(), value);
            }
            System.out.println(retMap);
            return retMap;
        } finally {
            // Previously leaked when any of the five model loads threw.
            spark.stop();
        }
    }

    /**
     * Loads the pipeline model stored under RF_MODEL_BASE + name and returns
     * the "prediction" column of the single-row input.
     */
    private static double predict(String name, Dataset<Row> input) {
        PipelineModel model = PipelineModel.load(RF_MODEL_BASE + name);
        Row head = model.transform(input).head();
        return head.getDouble(head.fieldIndex("prediction"));
    }

    /** Reads an int feature from the request map, falling back to a default. */
    private static int intOf(Map<String, Number> data, String key, int def) {
        Number v = data.get(key);
        return v == null ? def : v.intValue();
    }

    /** Reads a double feature from the request map, falling back to a default. */
    private static double doubleOf(Map<String, Number> data, String key, double def) {
        Number v = data.get(key);
        return v == null ? def : v.doubleValue();
    }

    public static void main(String[] args) {
        RecommenderServiceImpl hiveLoadDataService = new RecommenderServiceImpl();
        Map<String, Number> data = new HashMap<>();
        data.put("age", 35);
        data.put("gender", 1);
        data.put("weight", 75.0);
        data.put("height", 1.78);
        data.put("max_bpm", 0);
        data.put("avg_bpm", 0);
        data.put("resting_bpm", 0);
        data.put("session_duration", 0.0);
        data.put("fat_percentage", 0.0);
        data.put("water_intake", 0.0);
        data.put("workout_frequency", 0);
        data.put("experience_level", 1);
        data.put("bmi", 0.0);
        hiveLoadDataService.getPlanRecommenderData(data);
    }
}
