import java.util

import com.xnmzdx.test.RedisUtil
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.JavaConverters.mapAsScalaMapConverter
import scala.collection.JavaConverters._
import scala.util.Try

/**
 * Created by wm on 2020/3/30.
 */
object KafkaSparkStreamingDemo {
  /**
   * Persist the end offset of every consumed partition to Redis, keyed by
   * consumer group, topic and partition.
   *
   * @param ranges  offset ranges of the batch that was just processed
   * @param groupId consumer group the offsets belong to
   */
  def storeOffset(ranges: Array[OffsetRange], groupId: String): Unit = {
    ranges.foreach { range =>
      val redisKey = s"bi_kafka_offset_${groupId}_${range.topic}_${range.partition}"
      RedisUtil.getRedis.set(redisKey, range.untilOffset.toString)
    }
  }

  /**
   * Load previously saved offsets for the given topics and consumer group
   * from Redis.
   *
   * @param topics  Kafka topic names to look up
   * @param groupId consumer group id used in the Redis key prefix
   * @return (partition -> offset map, flag) where flag is 1 when at least one
   *         stored offset was found and 0 otherwise
   */
  def getOffset(topics: Array[String], groupId: String): (Map[TopicPartition, Long], Int) = {
    val fromOffSets = scala.collection.mutable.Map[TopicPartition, Long]()
    topics.foreach { topic =>
      val keys: util.Set[String] = RedisUtil.getRedis.keys(s"bi_kafka_offset_${groupId}_${topic}*")
      if (!keys.isEmpty) {
        keys.asScala.foreach { key =>
          // GET can return null if the key expired between KEYS and GET;
          // Option(...) guards against the NPE that offset.toLong would throw.
          Option(RedisUtil.getRedis.get(key)).foreach { offset =>
            // Partition id is the suffix after the key prefix; default to 0
            // if the key does not match the expected layout.
            val partition = Try(key.split(s"bi_kafka_offset_${groupId}_${topic}_").apply(1)).getOrElse("0")
            fromOffSets.put(new TopicPartition(topic, partition.toInt), offset.toLong)
          }
        }
      }
    }
    // The flag merely signals whether anything was found; no need for the
    // duplicated if/else branches of the original.
    (fromOffSets.toMap, if (fromOffSets.isEmpty) 0 else 1)
  }

  /**
   * Build the Kafka direct InputDStream. When stored offsets exist in Redis
   * and auto.offset.reset is "latest", resume from the stored offsets;
   * otherwise fall back to Kafka's own reset policy.
   *
   * @param ssc         streaming context to attach the stream to
   * @param topic       topics to subscribe to
   * @param kafkaParams Kafka consumer configuration; must contain "group.id"
   * @return direct stream of (String, String) consumer records
   */
  def createStreamingContextRedis(ssc: StreamingContext, topic: Array[String],
                                  kafkaParams: Map[String, Object]): InputDStream[ConsumerRecord[String, String]] = {
    // Fail fast with a clear message instead of Option.get's bare
    // NoSuchElementException when group.id is missing.
    val groupId = kafkaParams.getOrElse("group.id",
      throw new IllegalArgumentException("kafkaParams must contain group.id")).toString
    val (fromOffSet, flag) = getOffset(topic, groupId)
    // A missing auto.offset.reset simply means we cannot resume from Redis;
    // Option.contains avoids the unsafe .get of the original.
    val offsetReset = kafkaParams.get("auto.offset.reset")
    val locationStrategy = LocationStrategies.PreferConsistent
    // if/else is an expression in Scala — no mutable var needed.
    if (flag == 1 && offsetReset.contains("latest")) {
      KafkaUtils.createDirectStream(ssc, locationStrategy,
        ConsumerStrategies.Subscribe(topic, kafkaParams, fromOffSet))
    } else {
      KafkaUtils.createDirectStream(ssc, locationStrategy,
        ConsumerStrategies.Subscribe(topic, kafkaParams))
    }
  }
  /**
   * Entry point: consume "topic-demo" from Kafka in 5-second batches, print
   * each record's value, then persist the batch's offsets to Redis.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("KafkaSparkStreamingDemo")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")
    val ssc = new StreamingContext(sc, Seconds(5))
    val brokers = "spark:9092"
    val topic = Array("topic-demo")
    val groupId = "sparkGroup"
    // lowerCamelCase per Scala convention (was `KafkaParams`).
    val kafkaParams = Map(
      "bootstrap.servers" -> brokers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupId,
      "auto.offset.reset" -> "latest",
      // Offsets are committed manually (to Redis), so disable auto-commit.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val resultDstream: InputDStream[ConsumerRecord[String, String]] =
      createStreamingContextRedis(ssc, topic, kafkaParams)

    resultDstream.foreachRDD { rdd =>
      // isEmpty stops at the first element; count() scans the whole RDD
      // just to compare against zero.
      if (!rdd.isEmpty()) {
        rdd.foreach(record => println(record.value()))
        // Offset ranges must be read from the RDD handed to foreachRDD
        // (the KafkaRDD), before any transformation is applied.
        val ranges: Array[OffsetRange] = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        storeOffset(ranges, groupId)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}


