import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.sql.execution.streaming.CommitMetadata.format
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.KafkaUtils
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.json4s.jackson.Serialization.write

import java.util.HashMap

object Spark_Kafka2 {

  /**
   * Spark Streaming word-count pipeline between two Kafka topics.
   *
   * Every 10-second batch this app:
   *   1. reads string messages from topic "sou3" (consumer group "niit005"),
   *   2. maintains a cumulative per-word count via `updateStateByKey`
   *      (which requires a checkpoint directory),
   *   3. serializes the aggregated (word, count) pairs to JSON and
   *      publishes them to topic "sou6".
   */
  def main(args: Array[String]): Unit = {

    // Local Windows Hadoop runtime and HDFS user for checkpointing.
    System.setProperty("hadoop.home.dir", "D:\\codes\\jar-file\\hadoop-2.7.3")
    System.setProperty("HADOOP_USER_NAME", "root")
    val group = "niit005"
    val topic = "sou3"
    val conf = new SparkConf().setMaster("local[*]").setAppName("sparkKafka")
    val ssc = new StreamingContext(conf, Seconds(10))

    // updateStateByKey requires checkpointing. Set the directory exactly once:
    // the original called ssc.checkpoint twice ("cp", then "./checkpoint");
    // the second call silently overrode the first, so only one is kept.
    ssc.checkpoint("./checkpoint")
    // Suppress everything below ERROR-level log output.
    ssc.sparkContext.setLogLevel("ERROR")

    // Consumer configuration. Offsets are managed manually
    // (enable.auto.commit = false) and reading starts from the earliest
    // available offset when no committed offset exists.
    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "niit1:9092",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> group,
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Topics to subscribe to.
    val topicName = Array(topic)

    val streamRDD = KafkaUtils.createDirectStream[String, String](
      ssc,
      // Location strategy: distribute partitions evenly across executors.
      PreferConsistent,
      // Subscription strategy.
      Subscribe[String, String](topicName, kafkaParams)
    )

    val lines = streamRDD.map(_.value())
    lines.map((_, 1)).updateStateByKey(
      // Merge this batch's occurrences (seq) into the running total (buff).
      (seq: Seq[Int], buff: Option[Int]) => {
        val newCount = buff.getOrElse(0) + seq.sum
        Option(newCount)
      }
    ).foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // Producer configuration for writing the aggregated counts back to
        // Kafka. Named producerProps (was kafkaParams) to avoid shadowing the
        // consumer configuration defined above.
        val producerProps = new HashMap[String, Object]()
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "niit1:9092")
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
          "org.apache.kafka.common.serialization.StringSerializer")
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
          "org.apache.kafka.common.serialization.StringSerializer")

        val producer = new KafkaProducer[String, String](producerProps)
        try {
          // Counts fit on the driver here (small keyspace), so collect is OK.
          val counts = rdd.reduceByKey(_ + _).collect()
          counts.foreach(println)

          // Serialize (word, count) pairs to JSON; the implicit Formats comes
          // from the CommitMetadata.format import at the top of the file.
          val countJson = write(counts)
          println(countJson)

          producer.send(new ProducerRecord[String, String]("sou6", null, countJson))
        } finally {
          // close() flushes pending sends and releases network resources.
          // The original leaked one unflushed producer per batch; because
          // send() is asynchronous, records could be lost without this.
          producer.close()
        }
      }
    }

    // Start the receivers and block until termination.
    ssc.start()
    ssc.awaitTermination()
    //ssc.stop()

  }
}
