package com.sub.spark.streaming.instance;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.apache.spark.streaming.kafka010.LocationStrategy;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Spark Streaming example that consumes String records from a local Kafka
 * topic via the direct (kafka010) API and prints each record's value once
 * per micro-batch.
 *
 * @author Submerge.
 * @since 2025/6/13 16:50
 * @version 1.0
 */

public class SparkKafkaStreamingInstance {

    /** Micro-batch interval of the streaming context, in milliseconds. */
    private static final long BATCH_INTERVAL_MS = 10 * 1000L;

    /** Topic consumed by this example. */
    private static final String TOPIC = "sub-spark-kafka";

    /**
     * Entry point: builds a local-mode streaming context, subscribes to
     * {@link #TOPIC} on a local broker, and prints up to 100 record values
     * per batch until the job is terminated externally.
     *
     * @param args unused
     * @throws InterruptedException if interrupted while awaiting termination
     */
    public static void main(String[] args) throws InterruptedException {

        SparkConf sparkConf = new SparkConf()
                .setAppName("sub-spark-kak-streaming")
                .setMaster("local[2]");

        JavaStreamingContext jsc = new JavaStreamingContext(sparkConf, new Duration(BATCH_INTERVAL_MS));

        // Kafka consumer configuration.
        Map<String, Object> kafkaParams = getKafkaConsumerConfig();

        // Topics to subscribe to.
        List<String> topics = new ArrayList<>();
        topics.add(TOPIC);

        // PreferConsistent spreads partitions evenly across available
        // executors. PreferBrokers is only correct when the executors run on
        // the same hosts as the Kafka brokers, which does not hold for
        // local[2] mode.
        JavaInputDStream<ConsumerRecord<String, String>> directStream =
                KafkaUtils.createDirectStream(
                        jsc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.<String, String>Subscribe(topics, kafkaParams));

        // Print each record's value; at most 100 elements per batch.
        directStream.map(ConsumerRecord::value).print(100);

        jsc.start();
        jsc.awaitTermination();
    }

    /**
     * Builds the Kafka consumer configuration for the direct stream.
     *
     * @return consumer properties: local broker, String key/value
     *         deserializers, a fixed consumer group, and "earliest" offset
     *         reset when no committed offset exists
     */
    private static @NotNull Map<String, Object> getKafkaConsumerConfig() {
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        kafkaParams.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaParams.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        kafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, "sub-spark-kafka-streaming");
        // Replay the topic from the beginning when the group has no
        // committed offset yet.
        kafkaParams.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // NOTE(review): enable.auto.commit is left at the Kafka default; for
        // at-least-once delivery the Spark integration guide recommends
        // disabling it and committing offsets manually — confirm requirements.
        return kafkaParams;
    }
}
