package ApplicationTest.Example.KafKa

import ApplicationStart.SparkService.SparkService
import ApplicationTest.Example.KafKa.SimpleCreateDirecty.streamingContext
import jdk.internal.util.xml.impl.Input
import kafka.common.TopicAndPartition
import kafka.utils.{ZKGroupTopicDirs, ZkUtils}
import org.I0Itec.zkclient.ZkClient
import org.apache.hadoop.yarn.lib.ZKClient
import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, KafkaUtils, OffsetRange}
import org.apache.spark.streaming.{Milliseconds, StreamingContext}
import org.springframework.beans.factory.annotation.Autowired


object LocalSteamingTest2 {

  // Local-mode Spark: use all available cores on the driver machine.
  private val conf = new SparkConf().setMaster("local[*]").setAppName("Scala Spark Test Application")
  private val zkQuorum = "master:2181,spark02:2181,spark03:2181,spark04:2181" // ZooKeeper ensemble

  /**
   * Entry point: consumes the "helloword" topic with the Kafka 0.10 direct
   * stream API, resuming from per-partition offsets previously persisted in
   * ZooKeeper (if any) and writing the processed offsets back after every
   * batch (at-least-once semantics). Runs until externally terminated.
   */
  def main(args: Array[String]): Unit = {

    // 5-second micro-batches.
    val ssc = new StreamingContext(conf, Milliseconds(5000))

    val groupId = "group1"

    // enable.auto.commit = false: offsets are managed manually in ZooKeeper,
    // not by the Kafka consumer itself.
    val kafkaParams = Map[String, Object](
      "metadata.broker.list" -> "master:9092",
      "bootstrap.servers" -> "master:9092,spark02:9092,spark03:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> groupId,
      // Only consulted when no stored offset exists for a partition.
      "auto.offset.reset" -> "latest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topic = "helloword"
    val topics = Array(topic).toSet

    // ZK directory layout for this (group, topic) pair,
    // e.g. /consumers/group1/offsets/helloword
    val topicDirs = new ZKGroupTopicDirs(groupId, topic)
    val zkTopicConsumerDir = s"${topicDirs.consumerOffsetDir}"

    val zkClient = new ZkClient(zkQuorum)

    // One child znode per partition whose offset was saved on a previous run.
    val children = zkClient.countChildren(zkTopicConsumerDir)

    // Offsets recovered from ZooKeeper; empty on the very first run, in which
    // case the consumer falls back to auto.offset.reset.
    var fromOffsets: Map[TopicPartition, Long] = Map()
    if (children > 0) {
      for (partitionId <- 0 until children) {
        val storedOffset = zkClient.readData[String](s"$zkTopicConsumerDir/$partitionId")
        fromOffsets += (new TopicPartition(topic, partitionId) -> storedOffset.toLong)
      }
    }

    // FIX: the recovered offsets were previously built but never handed to the
    // consumer, so every restart silently jumped to "latest" and could skip
    // data. The Subscribe overload taking an offsets map resumes from them.
    val kafkaStream = KafkaUtils.createDirectStream[String, String](
      ssc,
      PreferConsistent,
      Subscribe[String, String](topics, kafkaParams, fromOffsets)
    )

    kafkaStream.foreachRDD { rdd =>
      // Capture this batch's offset ranges on the driver before processing;
      // no separate transform() pass is needed for that.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      rdd.foreachPartition { partition =>
        partition.foreach { record =>
          println(record.key())
          println(record.value())
        }
      }

      // FIX: persist the processed end-offsets back to ZooKeeper (runs on the
      // driver) so the next start resumes where this batch left off. Without
      // this write-back the recovery path above could never trigger.
      for (range <- offsetRanges) {
        ZkUtils.updatePersistentPath(
          zkClient,
          s"$zkTopicConsumerDir/${range.partition}",
          range.untilOffset.toString
        )
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
