package com.shellyan.gmall.rt.app

import com.alibaba.fastjson.JSON
import com.atguigu.realtime.gmall.common.Constant
import com.shellyan.gmall.rt.bean.StartupLog
import com.shellyan.gmall.rt.util.MyKafkaUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @author Shelly An
 * @create 2020/9/7 9:26
 *        Template for Spark Streaming apps; either an abstract class or a
 *        trait would work here.
 */
abstract class BaseApp {
  // Deployment and batching settings each concrete application must supply.
  val master: String
  val appName: String
  val batchTime: Int
  val topics: Set[String]
  val groupId: String

  /**
   * Per-application processing applied to the raw Kafka record stream.
   * Implementations register their transformations/outputs here; the
   * context is started by [[main]] after this returns.
   */
  def run(sourceStream: DStream[String]): Unit

  def main(args: Array[String]): Unit = {
    // 1. Build the streaming context from the app-specific settings.
    val sparkConf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
    val streamingContext = new StreamingContext(sparkConf, Seconds(batchTime))

    // 2. Obtain the source DStream from Kafka for the configured topics/group.
    val kafkaStream: DStream[String] =
      MyKafkaUtil.getKafkaStream(streamingContext, groupId, topics)

    // 3. Let the concrete subclass wire up its processing graph.
    run(kafkaStream)

    // 4. Start the context, then block the driver until termination.
    streamingContext.start()
    streamingContext.awaitTermination()
  }

}
