package com.atguigu.realtime.app

import com.atguigu.realtime.util.MyKafkaUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
 * Base class for streaming apps that consume several Kafka topics at once.
 * One DStream is created per topic; the resulting streams are handed to
 * [[run]] keyed by topic name.
 *
 * Concrete applications supply the Spark/Kafka configuration via the
 * abstract members below and implement [[run]] with the processing logic.
 */
abstract class BaseAppV2 {
    val appName: String     // Spark application name
    val master: String      // Spark master URL (e.g. "local[*]")
    val batchTime: Int      // micro-batch interval, in seconds
    val groupId: String     // Kafka consumer group id
    val topic: Set[String]  // topics to subscribe to; one stream is built per topic

    /**
     * Entry point: builds the StreamingContext, creates one Kafka stream per
     * topic, delegates to [[run]], then starts the context and blocks until
     * termination.
     */
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName(appName).setMaster(master)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(batchTime))

        // Map each topic name to its stream. The lambda parameter is named
        // `t` (not `topic`) to avoid shadowing the abstract member above.
        val topicAndStream: Map[String, DStream[String]] =
            topic.map(t => t -> MyKafkaUtil.getKafkaStream(ssc, groupId, t)).toMap

        run(ssc, topicAndStream)

        ssc.start()
        ssc.awaitTermination()
    }

    /**
     * Stream-processing logic implemented by concrete applications.
     * Called once, before the StreamingContext is started.
     *
     * Note: explicit `: Unit` result type replaces the deprecated
     * procedure syntax (removed in Scala 3).
     */
    def run(ssc: StreamingContext, topicAndStream: Map[String, DStream[String]]): Unit
}
