package com.shellyan.gmall.rt.app

import com.shellyan.gmall.rt.util.MyKafkaUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @author Shelly An
 * @create 2020/9/7 9:26
 *        Either an abstract class or a trait works here.
 */
/**
 * Template base for Spark Streaming applications consuming from Kafka.
 *
 * Subclasses supply the Spark master/app name, the micro-batch interval,
 * the Kafka topics and consumer group, and implement [[run]] with the
 * per-topic stream processing logic. [[main]] owns the streaming
 * lifecycle: build context, subscribe, run, start, block.
 */
abstract class Base2App {
  // Deployment / batching configuration provided by the concrete app.
  val master: String
  val appName: String
  val batchTime: Int          // micro-batch interval in seconds
  val topics: Set[String]     // Kafka topics, one DStream is created per topic
  val groupId: String         // Kafka consumer group id
  // Exposed so subclasses can reach the context from run(); assigned in main().
  var ssc: StreamingContext = _

  /**
   * Business logic hook: receives a map from topic name to its DStream.
   * Called once, before the context is started.
   */
  def run(streams: Map[String, DStream[String]]): Unit

  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
    ssc = new StreamingContext(sparkConf, Seconds(batchTime))

    // One Kafka stream per topic, keyed by topic name.
    val topicStreams: Map[String, DStream[String]] =
      topics.iterator
        .map(topic => topic -> MyKafkaUtil.getKafkaStream(ssc, groupId, Set(topic)))
        .toMap

    run(topicStreams)

    // Start the streaming context, then block the driver until termination.
    ssc.start()
    ssc.awaitTermination()
  }

}
