package com.atguigu.realtime.apps

import com.atguigu.realtime.utils.DStreamUtil
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Created by Smexy on 2022/8/26
 *
 *    Kept readable and simple so the job is easy to maintain.
 */
/**
 * Demo streaming job: consumes records from the configured Kafka topic and
 * prints each record's value to stdout for every micro-batch.
 */
object TestDemo extends BaseApp {

  // Job configuration required by the BaseApp contract.
  override var batchDuration: Int = 5
  override var appName: String = "TestDemo"
  override var groupId: String = "220409realtime"
  override var topic: String = "topicD"

  def main(args: Array[String]): Unit = {
    // Replace the context declared in the parent BaseApp with one
    // configured for this application (local master, 5s batches).
    context = new StreamingContext("local[*]", appName, Seconds(batchDuration))

    runSparkStreamingApp {
      // Obtain the Kafka-backed input stream for this group/topic.
      val records: InputDStream[ConsumerRecord[String, String]] =
        DStreamUtil.getDStream(context, groupId, topic)

      // Extract the message payloads and print up to 10000 per batch.
      val values = records.map(_.value())
      values.print(10000)
    }
  }
}
