package com.atguigu.realtime

import com.atguigu.realtime.util.{MyKafkaUtil, OffsetManager}
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s.CustomSerializer
import org.json4s.JsonAST.{JDouble, JInt, JLong, JString}

import scala.collection.mutable.ListBuffer

/**
 * Author atguigu
 * Date 2020/11/16 9:25
 *
 * 把多个topic放在一个流中
 */
/**
 * Template (abstract base) for Spark Streaming real-time apps that consume
 * several Kafka topics in a single DStream.
 *
 * Subclasses supply the connection/config members and implement [[run]];
 * this class owns the StreamingContext lifecycle, offset recovery via
 * [[OffsetManager]], and offset-range capture for later commit.
 */
abstract class BaseAppV2 {
    val master: String          // Spark master URL, e.g. "local[2]" or "yarn"
    val appName: String         // Spark application name
    val groupId: String         // Kafka consumer group id (also the key for stored offsets)
    val topics: Seq[String]     // 同时消费多个topic — all topics consumed in one stream
    // Batch interval in seconds. NOTE(review): name is a typo for "batchTime",
    // kept unchanged because subclasses override it (renaming would break them).
    val bachTime: Int
    
    /**
     * json4s serializer: deserialize a Long from a JSON string, integer or long
     * node; serialize a Long back as JLong.
     */
    val toLong = new CustomSerializer[Long](ser = format => ( {
        case JString(s) => s.toLong
        case JInt(n) => n.toLong
        case JLong(n) => n // FIX: json4s may parse integral values as JLong; previously unhandled -> MappingException
    }, {
        case n: Long => JLong(n)
    }))
    
    /**
     * json4s serializer: deserialize a Double from a JSON string, floating or
     * integral node; serialize a Double back as JDouble.
     */
    val toDouble = new CustomSerializer[Double](format => ( {
        case JString(s) => s.toDouble
        case JDouble(d) => d
        case JInt(n) => n.toDouble // FIX: whole-number JSON values arrive as JInt; previously unhandled -> MappingException
    }, {
        case d: Double => JDouble(d)
    }))
    
    /**
     * Entry point: builds the StreamingContext, restores the last committed
     * offsets for `groupId`/`topics`, wires the Kafka source stream, delegates
     * business logic to [[run]], then starts the streaming job and blocks.
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(bachTime))
        
        // Resume from the offsets previously persisted for this consumer group.
        val offsets: Map[TopicPartition, Long] = OffsetManager.readOffsets(groupId, topics)
        // Mutated on the driver inside transform() each batch; run() implementations
        // read it when committing offsets after the batch's output action.
        val offsetRanges: ListBuffer[OffsetRange] = ListBuffer.empty[OffsetRange]
        val sourceStream = MyKafkaUtil
            .getKafkaStream(ssc, groupId, topics, offsets)
            .transform(rdd => {
                // Replace (not append to) last batch's ranges so commits never
                // re-use stale offsets from a previous batch.
                offsetRanges.clear()
                // Standard Spark pattern: only the direct Kafka RDD (first in the
                // chain, inside transform) carries HasOffsetRanges — runs on driver.
                val newOffsetRanges: Array[OffsetRange] = rdd
                    .asInstanceOf[HasOffsetRanges].offsetRanges
                offsetRanges ++= newOffsetRanges //driver
                rdd
            })
            .map(record => (record.topic(), record.value()))
        
        run(ssc, sourceStream, offsetRanges)
        
        ssc.start()
        ssc.awaitTermination()
    }
    
    /**
     * Business logic hook implemented by concrete apps.
     *
     * @param ssc          the streaming context (for e.g. broadcast, checkpoint)
     * @param sourceStream _1: topic the record came from, _2: record value (数据)
     * @param offsetRanges current batch's offset ranges; commit them after output
     */
    def run(ssc: StreamingContext,
            sourceStream: DStream[(String, String)], // _1: topic _2: 数据
            offsetRanges: ListBuffer[OffsetRange])
    
    
}
