package com.gmall.app

import com.gmall.utils.{Constants, MyKafkaUtil}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * <p>项目名称: 实时数仓项目 </p>
 * <p>文件名称: 基础抽象类 </p>
 * <p>描述: 提供通用的kafka数据源、ssc环境</p>
 * <p>创建时间: 2020/10/19</p>
 * @author <a href="mailto:1034652894@qq.com" rel="nofollow">Demik Qin</a>
 * @version v1.0
 * @update [序号][日期YYYY-MM-DD] [更改人姓名][变更描述]
 */
abstract class BaseApp {
    /** Spark application name, shown in the Spark UI. */
    val appName: String
    /** Spark master URL (e.g. "local[*]" or "yarn"). */
    val master: String
    /** Micro-batch interval, in seconds. */
    val batchTime: Int
    /** Kafka consumer group id. */
    val groupId: String
    /** Kafka topic to consume. */
    val topic: String

    /**
     * Entry point: builds the StreamingContext, wires up the Kafka source,
     * delegates the business logic to [[run]], then starts the streaming job
     * and blocks until termination.
     */
    def main(args: Array[String]): Unit = {
        // 1. Create a StreamingContext from the subclass-provided settings
        val conf = new SparkConf().setAppName(appName).setMaster(master)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(batchTime))

        // 2. Obtain the source stream from Kafka
        val sourceStream: DStream[String] = MyKafkaUtil.getKafkaStream(ssc, groupId, topic)

        // 3. Concrete business logic supplied by the subclass
        run(ssc, sourceStream)

        // 4. Start the streaming context
        ssc.start()
        // 5. Block the main thread so the stream is not shut down on exit
        ssc.awaitTermination()
    }

    /**
     * Business-logic hook implemented by concrete applications.
     *
     * Note: explicit `: Unit` result type — the original procedure syntax
     * (`def run(...)` with no result type) is deprecated in Scala 2.13 and
     * removed in Scala 3.
     *
     * @param ssc          the StreamingContext (not yet started when invoked)
     * @param sourceStream the raw Kafka record stream
     */
    def run(ssc: StreamingContext, sourceStream: DStream[String]): Unit
}
