package com.example.homework1

import com.example.homework1.MyKafkaConsumer.getKafkaConsumerParams
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author lbq
 * @date 2022/2/20 21:46
 * @version 1.0
 */
object MyKafkaConsumer2 {

  /**
   * Entry point: subscribes to Kafka topic "topic_B1" with consumer group
   * "consumerC" via Spark Streaming (10-second micro-batches) and prints
   * each record's key and value. Runs locally on all available cores and
   * blocks until the streaming context terminates.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf =
      new SparkConf()
        .setMaster("local[*]")
        .setAppName(this.getClass.getCanonicalName)
    val sparkContext: SparkContext = new SparkContext(sparkConf)

    // One micro-batch every 10 seconds.
    val streamingContext: StreamingContext = new StreamingContext(sparkContext, Seconds(10))
    sparkContext.setLogLevel("warn")

    // Kafka consumer configuration and the topic(s) to subscribe to.
    val consumerParams: Map[String, Object] = getKafkaConsumerParams("consumerC")
    val subscribedTopics: Array[String] = Array("topic_B1")

    val kafkaStream: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream(
        streamingContext,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](subscribedTopics, consumerParams)
      )

    // Print every record of every batch. NOTE(review): println runs on the
    // executors; output is visible here only because the master is local.
    kafkaStream.foreachRDD { batch =>
      batch.foreach { record =>
        println(s"**********key: ${record.key()}, value: ${record.value()}**********")
      }
    }

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
