// NOTE(review): this entire class is commented-out dead code. Prefer deleting it and
// relying on version-control history; if it must be kept, document why it is retained
// (e.g. pending Kafka cluster availability) so the next maintainer knows its status.
//package com.tzg157.fitness.recommender;
//
//import org.apache.kafka.clients.consumer.ConsumerRecord;
//import org.apache.kafka.common.TopicPartition;
//import org.apache.kafka.common.serialization.StringDeserializer;
//import org.apache.spark.SparkConf;
//import org.apache.spark.ml.recommendation.ALSModel;
//import org.apache.spark.sql.Dataset;
//import org.apache.spark.sql.Row;
//import org.apache.spark.sql.RowFactory;
//import org.apache.spark.sql.SparkSession;
//import org.apache.spark.sql.types.DataTypes;
//import org.apache.spark.sql.types.StructField;
//import org.apache.spark.sql.types.StructType;
//import org.apache.spark.streaming.Durations;
//import org.apache.spark.streaming.api.java.JavaDStream;
//import org.apache.spark.streaming.api.java.JavaInputDStream;
//import org.apache.spark.streaming.api.java.JavaStreamingContext;
//import org.apache.spark.streaming.kafka010.ConsumerStrategies;
//import org.apache.spark.streaming.kafka010.KafkaUtils;
//import org.apache.spark.streaming.kafka010.LocationStrategies;
//
//import java.util.*;
//
// Streams user/course rating events from Kafka and serves ALS-based course
// recommendations for the users seen in each micro-batch.
//public class KafkaStreamingResolver {
//
//    public static final String TOPICS = "courseRate";
//
//    public static void main(String[] args) throws InterruptedException {
//        // Create the SparkConf (local mode, 4 cores)
//        SparkConf conf = new SparkConf().setAppName("KafkaALSRecommendation").setMaster("local[4]");
//        // Create the JavaStreamingContext with a 1-second batch interval
//        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));
//
//        // Create the SparkSession (shares the streaming context's SparkConf;
//        // Hive support is enabled but never used below — verify it is needed)
//        SparkSession spark = SparkSession.builder()
//                .config(jssc.sparkContext().getConf())
//                .enableHiveSupport()
//                .config("hive.metastore.uris", "thrift://localhost:9083")
//                .getOrCreate();
//
//        // Load the previously saved ALS model from local disk
//        String modelPath = "/opt/hive/models/als_model";
//        ALSModel model = ALSModel.load(modelPath);
//        System.out.println("模型已加载: " + modelPath);
//
//        // Kafka consumer configuration
//        Map<String, Object> kafkaParams = new HashMap<>();
//        kafkaParams.put("bootstrap.servers", "localhost:9092");
//        kafkaParams.put("key.deserializer", StringDeserializer.class);
//        kafkaParams.put("value.deserializer", StringDeserializer.class);
//        kafkaParams.put("group.id", "user_course_group_id");
//        kafkaParams.put("auto.offset.reset", "latest");
//        kafkaParams.put("enable.auto.commit", false);
//
//        // Kafka topics to subscribe to
//        // NOTE(review): the TOPICS constant above is never used — this re-hardcodes
//        // the same topic string; reference the constant instead to keep them in sync.
//        Collection<String> topics = Collections.singletonList("courseRate");
//
//        // Create the Kafka direct DStream
//        JavaInputDStream<org.apache.kafka.clients.consumer.ConsumerRecord<String, String>> stream =
//                KafkaUtils.createDirectStream(
//                        jssc,
//                        LocationStrategies.PreferConsistent(),
//                        ConsumerStrategies.Subscribe(topics, kafkaParams)
//                );
//
//        // Parse received records into (uid, cid) Rows; records with other than
//        // 4 comma-separated fields are silently dropped.
//        // NOTE(review): only fields[0]/fields[1] are used yet exactly 4 fields are
//        // required — confirm the intended record format. Also, Integer.parseInt on a
//        // malformed numeric field throws NumberFormatException and would fail the
//        // batch/task; consider a try/catch that drops bad records instead.
//        JavaDStream<Row> newDataStream = stream.flatMap(record -> {
//            String[] fields = record.value().split(",");
//            if (fields.length == 4) {
//                Row row = RowFactory.create(Integer.parseInt(fields[0]), Integer.parseInt(fields[1]));
//                return Arrays.asList(row).iterator();
//            }
//            return Collections.emptyIterator();
//        });
//        newDataStream.print();
//        // Define the schema for the parsed rows
//        StructType schema = new StructType(new StructField[]{
//                DataTypes.createStructField("uid", DataTypes.IntegerType, false),
//                DataTypes.createStructField("cid", DataTypes.IntegerType, false)
//        });
//
//        // For each micro-batch, convert the RDD to a Dataset<Row> and recommend
//        // NOTE(review): recommendForUserSubset requires the dataset's user-id column
//        // name to match the model's userCol ("uid" presumably) — verify against the
//        // ALS training code; the extra "cid" column is ignored by this call.
//        newDataStream.foreachRDD(rdd -> {
//            if (!rdd.isEmpty()) {
//                Dataset<Row> newData = spark.createDataFrame(rdd, schema);
//
//                // Generate top-10 course recommendations for the users in this batch
//                Dataset<Row> userRecs = model.recommendForUserSubset(newData, 10);
//
//                // Show the recommendation results (untruncated)
//                userRecs.show(false);
//            }
//        });
//
//        // Start the StreamingContext and block until termination
//        jssc.start();
//        jssc.awaitTermination();
//    }
//}
