package com.atguigu.realtime.apps

import com.atguigu.realtime.utils.DStreamUtil
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream

/**
 * Created by Smexy on 2022/6/25
 */
object TestApp extends BaseApp {

  // Per-app configuration consumed by BaseApp. These are declared as abstract
  // `var`s in the parent, so `var` is required here even though the values
  // never change after initialization.
  override var appName: String = "TestApp"
  override var groupId: String = "TestApp"
  override var topic: String = "topicA"
  override var batchDuration: Int = 5

  def main(args: Array[String]): Unit = {
    // Build the streaming context: local master using all cores, with the
    // configured batch interval in seconds.
    context = new StreamingContext("local[*]", appName, Seconds(batchDuration))

    runApp {
      // Obtain the Kafka input stream for the configured consumer group / topic.
      val records: InputDStream[ConsumerRecord[String, String]] =
        DStreamUtil.createDS(context, groupId, topic)

      // Compute + output: extract each record's value and print each batch.
      records.map(_.value()).print()
    }
  }
}
