package com.unis.DataParsing

import com.unis.utils.{FastJsonUtil, KafkaZkUtils, PhoenixUtil}
import kafka.utils.ZkUtils
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.security.JaasUtils
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.TaskContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, KafkaUtils}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent

import scala.collection.mutable.ListBuffer

object ManClass {
  // Driver-side ZooKeeper client used to persist/restore Kafka offsets between runs.
  val zkUtils = ZkUtils.apply("192.168.1.25:2181,192.168.1.26:2181,192.168.1.27:2181", 30000, 30000, JaasUtils.isZkSecurityEnabled())

  /**
   * Entry point: consumes JSON messages from the "test" topic in 5-second
   * micro-batches, parses them with FastJsonUtil, upserts each record into the
   * Phoenix table DeviceDetails, and checkpoints the consumed offsets to
   * ZooKeeper so a restart resumes where the previous run stopped.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("ManClass").getOrCreate()
    val ssc = new StreamingContext(spark.sparkContext, Seconds(5))
    val group = "consumer_spark"
    val topics = Array("test")
    val kafkaParams = scala.collection.Map[String, Object](
      "bootstrap.servers" -> "192.168.1.25:6667,192.168.1.26:6667,192.168.1.27:6667",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      "auto.offset.reset" -> "latest",
      "partition.assignment.strategy" -> "org.apache.kafka.clients.consumer.RangeAssignor",
      // Offsets are committed manually to ZooKeeper below, so Kafka auto-commit is off.
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )
    val topic = topics(0)
    val offsetPath = s"/${topic}_${group}"

    // On startup, resume from the offset previously stored in ZooKeeper.
    // Stored node format: "topic,partition,offset" (a single-partition topic
    // is assumed — only one TopicPartition entry is ever built).
    val stream = if (zkUtils.pathExists(offsetPath)) {
      val data = zkUtils.readData(offsetPath)._1.split(",")
      val newOffset = Map(new TopicPartition(data(0), data(1).toInt) -> data(2).toLong)
      KafkaUtils.createDirectStream[String, String](ssc, PreferConsistent, Subscribe[String, String](Array(topic), kafkaParams, newOffset))
    } else {
      // First run: seed the node as "topic,partition,offset" so the read below
      // can be parsed. (Previously only the bare topic name was written, so
      // data(1)/data(2) threw ArrayIndexOutOfBoundsException on every first run.)
      zkUtils.createPersistentPath(offsetPath)
      KafkaZkUtils.setData(zkUtils, offsetPath, s"${topic},0,0")
      val data = zkUtils.readData(offsetPath)._1.split(",")
      val newOffset = Map(new TopicPartition(data(0), data(1).toInt) -> data(2).toLong)
      KafkaUtils.createDirectStream[String, String](ssc, PreferConsistent, Subscribe[String, String](Array(topic), kafkaParams, newOffset))
    }

    stream.foreachRDD { rdd =>
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges

      // Parse every message and upsert each resulting record into Phoenix.
      // (foreach is an action, so this triggers the batch's processing job.)
      rdd.map(_.value())
        .flatMap(message => FastJsonUtil.DataParsing(message))
        .foreach { record =>
          // Strip the "List( ... )" wrapper from the record's toString and split
          // into field values. NOTE(review): this also removes any ')' occurring
          // inside a field — assumes field values contain no parentheses/commas.
          val fields = record.toString.replace("List(", "").replace(")", "").split(",").toList
          val sql = "upsert into DeviceDetails values (?,?,?,?,?,?,?,?,?)"
          PhoenixUtil.SaveToPhoenix(sql, fields)
        }

      // Persist the highest processed offset back to ZooKeeper so the next run
      // resumes from it.
      // NOTE(review): zkUtils is a driver-side client captured in an executor
      // closure; ZkUtils is not Serializable — confirm this job runs in local
      // mode, or move the offset commit to the driver after the action above.
      rdd.foreachPartition { _ =>
        val o = offsetRanges(TaskContext.get.partitionId)
        zkUtils.updatePersistentPath(s"/${o.topic}_${group}", s"${o.topic},${o.partition},${o.untilOffset}")
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }

}
