package com.gmall.app


import com.gmall.utils.MyKafkaUtil
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * <p>File: base abstract class, V2</p>
 * <p>Description: consumes multiple Kafka topics concurrently, producing one stream per topic.</p>
 * <p>Created: 2020/10/23</p>
 * @author <a href="mailto:1034652894@qq.com" rel="nofollow">Demik Qin</a>
 * @version v1.0
 * @update [no.][date YYYY-MM-DD] [author name][change description]
 */
abstract class BaseAppV2 {
    // Spark application name, shown in the Spark UI.
    val appName: String
    // Spark master URL, e.g. "local[*]" or "yarn".
    val master: String
    // Micro-batch interval, in seconds.
    val batchTime: Int
    // Kafka consumer group id shared by all topic streams.
    val groupId: String
    // Topics to consume; one DStream is created per topic.
    val topics: Set[String]

    /**
     * Application entry point: builds the `StreamingContext`, creates one
     * Kafka stream per topic, delegates to the subclass via [[run]], then
     * starts the context and blocks until termination.
     *
     * @param args command-line arguments (currently unused)
     */
    def main(args: Array[String]): Unit = {
        // 1. Create the StreamingContext from the subclass-supplied settings.
        val conf = new SparkConf().setAppName(appName).setMaster(master)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(batchTime))

        // 2. Build a topic -> stream map. Mapping over the Set yields a Set
        //    of pairs; toMap converts it (topics are unique, so no keys collide).
        val topicAndStream: Map[String, DStream[String]] = topics.map(topic => {
            (topic, MyKafkaUtil.getKafkaStream(ssc, groupId, topic))
        }).toMap

        // 3. Subclass-defined business logic. All stream transformations and
        //    output actions must be registered before start() is called.
        run(ssc, topicAndStream)

        ssc.start()
        ssc.awaitTermination()
    }

    /**
     * Business logic supplied by concrete applications.
     *
     * Explicit `: Unit` result type: the original relied on deprecated
     * procedure syntax, which warns in Scala 2.13 and is removed in Scala 3.
     *
     * @param ssc            the active streaming context
     * @param topicAndStream one entry per topic in [[topics]], mapping the
     *                       topic name to its Kafka-backed DStream
     */
    def run(ssc: StreamingContext, topicAndStream: Map[String, DStream[String]]): Unit
}
