package com.atguigu.realtime

import com.atguigu.realtime.util.{MyKafkaUtil, OffsetManager}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s.CustomSerializer
import org.json4s.JsonAST.{JDouble, JInt, JLong, JString}

import scala.collection.mutable.ListBuffer

/**
 * Author atguigu
 * Date 2020/11/16 9:25
 *
 * The offset read location has been switched to MySQL.
 */
abstract class BaseAppV4 {
    // json4s formats instance shared with subclasses for JSON extraction.
    val f = org.json4s.DefaultFormats
    
    // Per-app configuration, supplied by each concrete subclass.
    val master: String
    val appName: String
    val groupId: String
    val topic: String
    val bachTime: Int // batch interval in seconds (name keeps the historical typo: subclasses override it)
    
    /**
     * json4s deserializer for Long fields.
     *
     * Fix: the serializer side emits JLong, but the original deserializer only
     * accepted JString/JInt, so values round-tripped through this serializer
     * (or parsed with useBigIntForLong = false) failed with a MappingException.
     * A JLong case is added; existing cases are unchanged.
     */
    val toLong = new CustomSerializer[Long](ser = format => ( {
        case JString(s) => s.toLong
        case JInt(s) => s.toLong
        case JLong(s) => s // accept native long nodes (what the serializer below produces)
    }, {
        case s: Long => JLong(s)
    }))
    
    /**
     * json4s deserializer for Double fields.
     *
     * Fix: integral JSON numbers (e.g. `1` instead of `1.0`) parse to
     * JInt/JLong and were rejected by the original deserializer. Both are now
     * widened to Double; existing cases are unchanged.
     */
    val toDouble = new CustomSerializer[Double](format => ( {
        case JString(s) => s.toDouble
        case JDouble(s) => s.toDouble
        case JInt(s) => s.toDouble // widen integral JSON numbers
        case JLong(s) => s.toDouble
    }, {
        case s: Double => JDouble(s)
    }))
    
    /**
     * Entry point: builds the StreamingContext, restores Kafka offsets from
     * MySQL, wires the source stream, delegates business logic to [[run]],
     * then starts the streaming job and blocks until termination.
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(bachTime))
        
        // Restore the last committed offsets for this group/topic from MySQL.
        val offsets = OffsetManager.readOffsetsFromMysql(groupId, topic)
        println("初始化的offset: " + offsets)
        // Driver-side mutable buffer: refreshed every batch inside transform,
        // read later by run() when it commits offsets after processing.
        val offsetRanges: ListBuffer[OffsetRange] = ListBuffer.empty[OffsetRange]
        
        val sourceStream: DStream[String] = MyKafkaUtil
            .getKafkaStream(
                ssc,
                groupId,
                topic,
                offsets)
            .transform(rdd => {
                // Capture this batch's offset ranges. The cast is only valid on
                // the RDD obtained directly from the Kafka source stream.
                val newOffsetRanges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
                offsetRanges.clear() // drop last batch's ranges before inserting
                offsetRanges ++= newOffsetRanges // runs on the driver
                rdd
            })
            .map(_.value())
        
        // Concrete business logic lives in the subclass.
        run(ssc, sourceStream, offsetRanges)
        
        ssc.start()
        ssc.awaitTermination()
    }
    
    /**
     * Business logic implemented by each concrete app.
     *
     * @param ssc          the active StreamingContext
     * @param sourceStream Kafka record values as a string stream
     * @param offsetRanges this batch's offset ranges; commit them to MySQL
     *                     only after the batch's output action succeeds
     */
    def run(ssc: StreamingContext,
            sourceStream: DStream[String],
            offsetRanges: ListBuffer[OffsetRange]): Unit
}
