package com.atguigu.realtime

import com.atguigu.realtime.util.{MyKafkaUtil, OffsetManager}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, OffsetRange}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s.CustomSerializer
import org.json4s.JsonAST.{JDouble, JInt, JLong, JString}

import scala.collection.mutable.ListBuffer

/**
 * Author atguigu
 * Date 2020/11/16 9:25
 *
 * 同时消费多个topic, 每个topic单独流
 */
/**
 * Author atguigu
 * Date 2020/11/16 9:25
 *
 * Base class for Spark Streaming apps that consume several Kafka topics at
 * once, exposing one separate DStream per topic. Subclasses supply the Spark
 * connection settings via the abstract vals and implement [[run]] with the
 * per-batch business logic; offsets read in each batch are published through
 * the per-topic `offsetRanges` buffers so the subclass can commit them after
 * processing.
 */
abstract class BaseAppV3 {
    val master: String
    val appName: String
    val groupId: String
    val topics: Seq[String] // topics consumed in parallel, one DStream each
    // NOTE(review): name is a typo for "batchTime" (seconds per micro-batch);
    // kept as-is because subclasses override it — renaming would break them.
    val bachTime: Int
    
    /**
     * json4s serializer reading a Long out of JString / JInt / JLong.
     * The JLong case matters because json4s emits JLong (not JInt) when the
     * parser runs with `useBigIntForLong = false`; without it such input
     * fails with a MappingException.
     */
    val toLong = new CustomSerializer[Long](ser = format => ( {
        case JString(s) => s.toLong
        case JInt(s) => s.toLong
        case JLong(s) => s // previously missing: JLong input would not deserialize
    }, {
        case s: Long => JLong(s)
    }))
    
    /**
     * json4s serializer reading a Double out of JString / JDouble / JInt /
     * JLong. Integral JSON numbers are accepted too, since every integer is a
     * valid double (the original rejected them).
     */
    val toDouble = new CustomSerializer[Double](format => ( {
        case JString(s) => s.toDouble
        case JDouble(s) => s
        case JInt(s) => s.toDouble
        case JLong(s) => s.toDouble
    }, {
        case s: Double => JDouble(s)
    }))
    
    /**
     * Entry point: builds the StreamingContext, wires one Kafka stream per
     * topic (resuming from the offsets stored by OffsetManager), hands
     * everything to [[run]], then starts the streaming job and blocks.
     */
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setMaster(master).setAppName(appName)
        val ssc: StreamingContext = new StreamingContext(conf, Seconds(bachTime))
        // One driver-side buffer per topic; refilled at the start of every
        // batch (inside transform) and read by subclasses to commit offsets.
        val offsetRanges: Map[String, ListBuffer[OffsetRange]] =
            topics.map(topic => (topic, ListBuffer.empty[OffsetRange])).toMap
        // Previously committed offsets for all topics of this consumer group.
        val offsets: Map[TopicPartition, Long] = OffsetManager.readOffsets(groupId, topics)
        val sourceStreams: Map[String, DStream[ConsumerRecord[String, String]]] = topics
            .map { topic =>
                // Restrict the stored offsets to the partitions of this topic.
                val currentTopicOffsets: Map[TopicPartition, Long] =
                    offsets.filter(_._1.topic() == topic)
                val stream: DStream[ConsumerRecord[String, String]] = MyKafkaUtil
                    .getKafkaStream(ssc, groupId, topic, currentTopicOffsets)
                    .transform { rdd =>
                        // transform's body runs on the driver once per batch,
                        // so mutating the driver-local ListBuffer here is safe.
                        // Clear first so the buffer holds only this batch's ranges.
                        offsetRanges(topic).clear()
                        offsetRanges(topic) ++= rdd.asInstanceOf[HasOffsetRanges].offsetRanges
                        rdd
                    }
                (topic, stream)
            }
            .toMap
        
        run(ssc, sourceStreams, offsetRanges)
        
        ssc.start()
        ssc.awaitTermination()
    }
    
    /**
     * Per-app business logic, implemented by the concrete subclass.
     *
     * @param ssc           the streaming context (for further wiring if needed)
     * @param sourceStreams one Kafka record stream per topic, keyed by topic name
     * @param offsetRanges  per-topic buffers holding the current batch's offset
     *                      ranges; commit them after processing each batch
     */
    def run(ssc: StreamingContext,
            sourceStreams: Map[String, DStream[ConsumerRecord[String, String]]],
            offsetRanges: Map[String, ListBuffer[OffsetRange]]): Unit
    
    
}
