package com.gitee.dufafei.spark.streaming

import com.gitee.dufafei.spark.streaming.offset.ZkOffset
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, HasOffsetRanges, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Duration, StreamingContext}

/**
 * Fluent builder that wires a Kafka direct stream into Spark Streaming and
 * manages consumer offsets in ZooKeeper (via [[ZkOffset]]).
 *
 * Usage: configure with the `setXxx` methods, then call one of the `execute`
 * overloads with a per-batch processing function. Offsets are read from ZK
 * before the stream is created and written back after each non-empty batch.
 */
final class SparkStreamingBuilder() {

  // Spark configuration; required only by the no-context execute() overload.
  var sparkConf: SparkConf = _
  // Kafka consumer parameters; must contain "group.id".
  var kafkaParam: Map[String, Object] = _
  // Kafka topics to subscribe to.
  var topics: Array[String] = _
  // ZooKeeper connection string of the Kafka cluster (offset storage).
  var zkServers: String = _
  // Batch interval, e.g. Milliseconds(1), Seconds(1), Minutes(1).
  var duration: Duration = _

  def setSparkConf(sparkConf: SparkConf): this.type = {
    this.sparkConf = sparkConf
    this
  }

  def setKafkaParam(kafkaParam: Map[String, Object]): this.type = {
    this.kafkaParam = kafkaParam
    this
  }

  def setTopics(topics: Array[String]): this.type = {
    this.topics = topics
    this
  }

  def setDuration(duration: Duration): this.type = {
    this.duration = duration
    this
  }

  def setZkServers(zkServers: String): this.type = {
    this.zkServers = zkServers
    this
  }

  // Fails fast with a clear message instead of an NPE deep inside Spark/ZK
  // when a required setter was never called.
  private def requireSet(value: AnyRef, name: String): Unit =
    if (value == null)
      throw new IllegalStateException(s"$name must be set before calling execute")

  /** Builds a StreamingContext from [[sparkConf]] and [[duration]], then runs the stream. */
  def execute(process: InputDStream[ConsumerRecord[String,String]] => Unit): Unit = {
    requireSet(sparkConf, "sparkConf")
    requireSet(duration, "duration")
    val ssc = new StreamingContext(sparkConf, duration)
    execute(ssc)(process)
  }

  /** Builds a StreamingContext from an existing SparkSession and [[duration]], then runs the stream. */
  def execute(sparkSession: SparkSession)(process: InputDStream[ConsumerRecord[String,String]] => Unit): Unit = {
    requireSet(duration, "duration")
    val sc = sparkSession.sparkContext
    val ssc = new StreamingContext(sc, duration)
    execute(ssc)(process)
  }

  /**
   * Creates the Kafka direct stream starting from the offsets stored in ZK,
   * applies `process`, and registers an output operation that persists the
   * end offsets of every non-empty batch back to ZK. Blocks until termination.
   */
  def execute(ssc: StreamingContext)(process: InputDStream[ConsumerRecord[String,String]] => Unit): Unit = {
    requireSet(kafkaParam, "kafkaParam")
    requireSet(topics, "topics")
    requireSet(zkServers, "zkServers")
    val zkUtils = ZkOffset(zkServers)
    // Resolve the consumer group explicitly so a missing key yields a clear
    // error rather than a bare NoSuchElementException.
    val groupId = kafkaParam.get("group.id") match {
      case Some(g) => g.toString
      case None    => throw new IllegalStateException("""kafkaParam must contain "group.id"""")
    }
    val offsets = zkUtils.getBeginningOffset(topics, groupId)
    val input = KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](topics, kafkaParam, offsets)
    )
    process(input)
    // Registered after process() so the offset commit runs as a later output
    // operation within each batch.
    input.foreachRDD{ rdd =>
      if(!rdd.isEmpty()) {
        val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
        zkUtils.setEndOffset(offsetRanges, groupId)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}

/** Companion providing a constructor-style factory: `SparkStreamingBuilder()`. */
object SparkStreamingBuilder {

  /** Returns a fresh, unconfigured builder instance. */
  def apply(): SparkStreamingBuilder =
    new SparkStreamingBuilder()
}
