package com.event.streaming.common.straming
import java.util.Properties
import com.event.streaming.config.setttings.StreamingConsumerSettings
import scala.reflect.runtime.universe.{TypeTag, typeOf}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.{Duration, StreamingContext}
import org.apache.kafka.common.TopicPartition
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

import scala.reflect.ClassTag

/**
  * Base class for Spark Streaming Kafka consumers.
  *
  * Concrete subclasses provide the target [[topic]]; broker URL, intervals,
  * window sizes, schema-registry URL and the debug flag come from the
  * [[StreamingConsumerSettings]] self-type.
  *
  * @tparam KDS key deserializer type (a Kafka `Deserializer` implementation;
  *             its fully-qualified class name is passed to the consumer config)
  * @tparam VDS value deserializer type (same contract as KDS)
  * @param group Kafka consumer group id
  */
@SerialVersionUID(2012314123L)
abstract class StreamingConsumer[KDS: TypeTag, VDS: TypeTag](group: String) extends Serializable { self: StreamingConsumerSettings =>

  /** Target Kafka topic to subscribe to. */
  def topic: String

  /**
    * Consume the topic in micro-batches and hand each batch to `process`.
    * Blocks until the streaming context terminates; the context is stopped
    * in all cases (including failure) via the `finally` block.
    *
    * @param process callback invoked once per micro-batch with the (key, value) RDD
    * @throws IllegalArgumentException if `spark` or its context is null
    * @throws IllegalStateException    if `streamingInterval` is not initialised
    */
  def start[K: ClassTag, V: ClassTag](spark: SparkSession, process: (SparkSession, RDD[(K, V)]) => Unit): Unit = {
    if (spark == null || spark.sparkContext == null)
      throw new IllegalArgumentException("spark session / spark context must not be null")
    if (streamingInterval == null)
      throw new IllegalStateException("streamingInterval has not been initialised")

    val ssCtx = new StreamingContext(spark.sparkContext, Duration(streamingInterval.toMillis))
    try {
      create[K, V](ssCtx, topic, group)
        .foreachRDD(rdd => process(spark, rdd.map { cr: ConsumerRecord[K, V] => (cr.key(), cr.value()) }))

      ssCtx.start()
      ssCtx.awaitTermination()
    } finally {
      ssCtx.stop()
    }
  }

  /**
    * Build the direct Kafka input stream for `topic`/`group`.
    *
    * Deserializer class names are derived from the KDS/VDS type parameters.
    * When a schema-registry URL is configured, Confluent Avro settings are
    * layered on top of the base parameters.
    */
  private def create[K: ClassTag, V: ClassTag](ssc: StreamingContext, topic: String, group: String): InputDStream[ConsumerRecord[K, V]] = {
    val initKafkaParams = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> kafkaBrokerUrl,
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> typeOf[KDS].typeSymbol.fullName,
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> typeOf[VDS].typeSymbol.fullName,
      ConsumerConfig.GROUP_ID_CONFIG -> group,
      ConsumerConfig.AUTO_OFFSET_RESET_CONFIG -> "earliest")

    val kafkaParams = this.schemaRegistryUrl match {
      case Some(url) =>
        import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig
        // Fix: pass the unwrapped `url`, not the Option itself — the previous
        // code put the string "Some(...)" into "schema.registry.url".
        initKafkaParams ++ Map(
          KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG -> "true",
          "schema.registry.url" -> url)
      case _ => initKafkaParams
    }

    if (this.debugEnabled) {
      // In debug mode, re-read the topic from the beginning.
      resetOffset(kafkaParams)
    }

    KafkaUtils.createDirectStream[K, V](
      ssc,
      LocationStrategies.PreferConsistent,
      // Use the `topic` parameter (the original subscribed to `this.topic`,
      // silently ignoring the argument; callers always passed `this.topic`,
      // so behaviour is unchanged, but the parameter is now honoured).
      ConsumerStrategies.Subscribe[K, V](Array(topic), kafkaParams)
    )
  }

  /**
    * Like [[start]], but applies a sliding window
    * (`windowLength` / `slidingInterval` from the settings) and caches the
    * windowed stream before handing each windowed RDD to `process`.
    *
    * @throws IllegalArgumentException if `spark` or its context is null
    * @throws IllegalStateException    if `streamingInterval` is not initialised
    */
  def window[K: ClassTag, V: ClassTag](spark: SparkSession, process: (SparkSession, RDD[(K, V)]) => Unit): Unit = {
    if (spark == null || spark.sparkContext == null)
      throw new IllegalArgumentException("spark session / spark context must not be null")
    if (streamingInterval == null)
      throw new IllegalStateException("streamingInterval has not been initialised")

    val ssCtx = new StreamingContext(spark.sparkContext, Duration(streamingInterval.toMillis))
    try {
      create[K, V](ssCtx, topic, group)
        .map { cr: ConsumerRecord[K, V] => (cr.key(), cr.value()) }
        .window(Duration(windowLength.toMillis), Duration(slidingInterval.toMillis))
        .cache()
        .foreachRDD(rdd => process(spark, rdd))

      ssCtx.start()
      ssCtx.awaitTermination()
    } finally {
      ssCtx.stop()
    }
  }

  /**
    * Reset the consumer group's offsets to the beginning of the topic.
    *
    * Polls once so partitions get assigned, then seeks every assigned
    * partition to its earliest offset. (The original only reset partition 0,
    * leaving multi-partition topics partially reset.)
    *
    * NOTE(review): seeks are lazy in the Kafka client and the consumer is
    * closed without a subsequent poll/commit — whether the reset actually
    * sticks for the group depends on the broker/commit configuration; this
    * matches the original behaviour. TODO confirm against the deployment.
    */
  private def resetOffset(kafkaParams: Map[String, Object]): Unit = {
    // `/:` is deprecated; foldLeft is the idiomatic replacement.
    val properties = kafkaParams.foldLeft(new Properties) { case (p, (k, v)) => p.put(k, v); p }
    import org.apache.kafka.clients.consumer.KafkaConsumer
    import java.util.Collections
    val consumer = new KafkaConsumer[String, String](properties)
    try {
      consumer.subscribe(Collections.singletonList(this.topic))
      // One poll triggers group join + partition assignment.
      consumer.poll(100L)
      // Seek ALL assigned partitions, not just partition 0.
      consumer.seekToBeginning(consumer.assignment())
    } finally {
      consumer.close()
    }
  }

}
