package com.xahj.one

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 * Created with IntelliJ IDEA.
 *
 * @Author xw
 * @Date 2022/6/21
 * @Time 12:09
 *
 * */



/**
 * Base template for Spark Streaming jobs.
 *
 * Holds the shared [[SparkSession]], [[SparkContext]] and [[StreamingContext]]
 * handles for subclasses; call [[init]] exactly once before touching any of them.
 */
class SparkStreamingJobTemplate {
  var spark: SparkSession = _
  var sc: SparkContext = _
  var ssc: StreamingContext = _

  /**
   * Initialises the Spark session, context and streaming context.
   *
   * @param batchDuration micro-batch interval (default: 2 seconds)
   * @param conf          Spark configuration; defaults to a local[2] master
   *                      with app name "demo", suitable for local testing
   */
  def init(
            batchDuration: Duration = Seconds(2),
            conf: SparkConf = new SparkConf().setAppName("demo").setMaster("local[2]")
          ): Unit = {
    // getOrCreate reuses an already-active session in this JVM, if any.
    val session = SparkSession.builder().config(conf).getOrCreate()
    spark = session
    sc = session.sparkContext
    ssc = new StreamingContext(sc, batchDuration)
  }
}

/**
 * Template for Spark Streaming jobs that consume from Kafka via the
 * spark-streaming-kafka-0-10 direct stream API.
 */
class SparkStreamingKafkaJobTemplate extends SparkStreamingJobTemplate {

  /**
   * Creates a direct Kafka input stream subscribed to the given topics.
   *
   * Must be called after [[init]], which builds the [[StreamingContext]].
   *
   * @param topics        Kafka topics to subscribe to
   * @param consumerProps Kafka consumer configuration (bootstrap.servers,
   *                      group.id, key/value deserializers, ...). Widened from
   *                      `Map[String, String]` to `Map[String, Object]` because
   *                      some consumer settings (e.g. `enable.auto.commit`)
   *                      take non-String values; existing String-valued maps
   *                      still compile thanks to Map's value covariance.
   * @tparam K key type produced by the configured key deserializer
   * @tparam V value type produced by the configured value deserializer
   * @return a DStream of consumer records, one per Kafka message
   */
  def getRecordFromKafka[K, V](
                                topics: Iterable[String],
                                consumerProps: Map[String, Object]
                              ): DStream[ConsumerRecord[K, V]] = {
    // Fail fast with a clear message instead of an NPE deep inside Spark.
    require(ssc != null, "StreamingContext not initialised - call init() first")
    KafkaUtils.createDirectStream[K, V](
      ssc,
      // PreferConsistent distributes partitions evenly across executors.
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[K, V](topics, consumerProps)
    )
  }
}