package com.atguigu.sparkstreaming.apps

import com.atguigu.sparkstreaming.utils.DStreamUtil
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Created by Smexy on 2022/7/18
 */
object BaseAppTestApp extends BaseApp {
  // Kafka consumer group id and topic consumed by this test app.
  override var groupId: String = "BaseAppTestApp"
  override var topic: String = "topicA"
  // Spark application name shown in the YARN/Spark UI.
  override var appName: String = "BaseAppTestApp"
  // Micro-batch interval in seconds.
  override var batchDuration: Int = 10

  /*
      Submit with:
        --master yarn --class com.atguigu.sparkstreaming.apps.BaseAppTestApp
      Spark options passed on the command line are picked up by SparkConf automatically.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName(appName)

    // Initialize the StreamingContext inherited from BaseApp before running the app body.
    context = new StreamingContext(conf, Seconds(batchDuration))

    runApp {
      // Consume the configured Kafka topic as a direct stream of ConsumerRecords.
      val stream: InputDStream[ConsumerRecord[String, String]] =
        DStreamUtil.createDStream(groupId, context, topic)

      // Dump record values to driver stdout for manual verification (up to 10000 per batch).
      stream.map(_.value()).print(10000)
    }
  }
}
