package com.andnnl

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{HasOffsetRanges, KafkaUtils, OffsetRange}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
  * Created by chenss on 2017/12/26.
  */
object SparkSteamingKafka {
    /**
      * Direct-stream Kafka word count: reads lines from the `words` topic,
      * counts words per 1-second micro-batch, prints the counts, and logs the
      * Kafka offset range consumed by each partition.
      *
      * Runs until externally terminated (`awaitTermination`).
      */
    def main(args: Array[String]): Unit = {
        // Kafka cluster and topic configuration.
        val brokers = "master:9092,slave1:9092,slave2:9092"
        val topics = "words"

        // local[2]: at least two threads so batch scheduling and task
        // execution can proceed concurrently on a single machine.
        val conf = new SparkConf().setMaster("local[2]").setAppName("streaming word count")
        val sc = new SparkContext(conf)
        sc.setLogLevel("WARN")
        val ssc = new StreamingContext(sc, Durations.seconds(1))
        val topicsSet = topics.split(",").toSet

        val kafkaParams = Map[String, Object](
            "bootstrap.servers" -> brokers,
            "key.deserializer" -> classOf[StringDeserializer],
            "value.deserializer" -> classOf[StringDeserializer],
            "group.id" -> "use_a_separate_group_id_for_each_stream",
            "auto.offset.reset" -> "latest",
            // Auto-commit is disabled, but this job never commits offsets
            // itself (no CanCommitOffsets.commitAsync call).
            // NOTE(review): on restart the consumer falls back to
            // auto.offset.reset = "latest" and may skip records — confirm
            // this is intended for a demo, or add commitAsync.
            "enable.auto.commit" -> (false: java.lang.Boolean)
        )

        val lines = KafkaUtils.createDirectStream[String, String](
            ssc,
            PreferConsistent,
            Subscribe[String, String](topicsSet, kafkaParams)
        )

        // Per-batch word count. record.value() is already a String
        // (StringDeserializer), so the previous `.toString` was redundant.
        val counts = lines.map(_.value()).flatMap(_.split(" ")).map(x => (x, 1L)).reduceByKey(_ + _)
        counts.foreachRDD { rdd =>
            rdd.foreach(println)
        }

        // Print the offset range each partition consumed in this batch.
        // offsetRanges must be captured on the driver directly from the
        // stream's RDD (before any shuffle) for HasOffsetRanges to apply;
        // the array is then serialized into the partition closure.
        lines.foreachRDD { rdd =>
            val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
            rdd.foreachPartition { _ =>
                val o: OffsetRange = offsetRanges(TaskContext.get.partitionId)
                println(s"${o.topic} ${o.partition} ${o.fromOffset} ${o.untilOffset}")
            }
        }

        // Tip: to inspect the full ConsumerRecord structure, print the raw
        // records with lines.foreachRDD(_.foreach(println)).
        ssc.start()
        ssc.awaitTermination()
    }
}