package com.edata.bigdata.streaming.kafka

import com.edata.bigdata.annotations.Edata_Consumer
import com.edata.bigdata.streaming.Consumer
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies, OffsetRange}

import scala.collection.mutable
import scala.collection.mutable.Map

@Edata_Consumer(target = "StreamingKafkaConsumer")
class SKConsumer[K, V] extends Consumer[K, V] with SKConnector {
  override var session: SparkSession = _
  override var ds: DStream[ConsumerRecord[K, V]] = _
  override var sc: StreamingContext = _
  // Per-partition starting offsets; when empty, the consumer group's committed
  // offsets (or the broker's auto.offset.reset policy) decide where to start.
  // NOTE: `Map` here is scala.collection.mutable.Map due to the file's import.
  override var startingOffset: mutable.Map[TopicPartition, Long] = Map[TopicPartition, Long]()
  // Offset ranges of the most recently generated batch, captured in transform() below.
  override var offsetsOfStream: Array[OffsetRange] = _
  override var batchDuration: Duration = _
  var KF_CSM_GROUP_ID: String = "direct"
  // Comma-separated list of topics to subscribe to.
  var KF_CSM_TOPIC: String = "TOPIC_A,TOPIC_B"

  /**
   * Creates the StreamingContext and the direct Kafka DStream.
   *
   * Requires `session` to be assigned before the call. The stream subscribes to
   * the topics in [[KF_CSM_TOPIC]] and records each batch's offset ranges into
   * [[offsetsOfStream]] for manual offset management (auto-commit is disabled).
   */
  override def createDataStream(): Unit = {
    // Honor an externally assigned batch interval; previously this was
    // hard-coded to 5s and the batchDuration field was silently ignored.
    sc = new StreamingContext(session.sparkContext, Option(batchDuration).getOrElse(Seconds(5)))

    // The map must be explicitly typed [String, Object], otherwise the Kafka
    // consumer-strategy API rejects it (translated from the original comment).
    val kafkaParams = Map[String, Object](
      // KF_CSM_BOOTSTRAP is presumably provided by SKConnector — not visible here.
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> KF_CSM_BOOTSTRAP,
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      // Offsets are tracked manually through offsetsOfStream, so disable auto-commit.
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> (false: java.lang.Boolean),
      ConsumerConfig.GROUP_ID_CONFIG -> KF_CSM_GROUP_ID
    )
    ds = KafkaUtils.createDirectStream[K, V](
      sc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[K, V](KF_CSM_TOPIC.split(","), kafkaParams, startingOffset)
    ).transform { rdd =>
      // transform's closure runs on the driver once per batch, so capturing the
      // offset ranges here is the standard pattern for offset bookkeeping.
      offsetsOfStream = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd
    }
  }

  /**
   * Registers the starting offset for a single topic partition.
   *
   * Fix: the previous implementation replaced the entire map on every call,
   * so setting offsets for several partitions kept only the last one.
   * Offsets now accumulate; a later call for the same partition overwrites
   * just that partition's entry.
   */
  override def setStartingOffset(topic: String, partition: Int, offset: Long): Unit = {
    startingOffset.put(new TopicPartition(topic, partition), offset)
  }

  /** Starts the streaming context and blocks until the stream terminates. */
  override def start(): Unit = {
    sc.start()
    sc.awaitTermination()
  }
}
