package sparkstreaming.nineteenthday2.lesson3

import kafka.serializer.StringDecoder
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object KafkaTest {

  /**
    * Streaming word count over a Kafka topic using the direct (receiver-less)
    * stream API: reads comma-separated tokens from each message value, counts
    * them per 2-second micro-batch, and prints the counts to stdout.
    *
    * Runs locally with 4 threads; broker, topic, and checkpoint location are
    * hard-coded for this demo.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[4]")
      // Kryo is faster and more compact than the default Java serialization.
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    // 2-second micro-batch interval.
    val ssc: StreamingContext = new StreamingContext(sc, Seconds(2))
    // Checkpoint directory for driver/offset recovery.
    // NOTE(review): hard-coded HDFS address — parameterize for non-local runs.
    ssc.checkpoint("hdfs://hadoop01:9000/streamingkafka")

    // createDirectStream[K, V, KeyDecoder, ValueDecoder](ssc, kafkaParams, topics)
    // pulls partitions straight from the brokers — no receiver, offsets tracked
    // by Spark itself.
    val kafkaParam = Map("metadata.broker.list" -> "hadoop01:9092")
    val topics = Set("auracategory")
    val kafkaDStream: DStream[String] =
      KafkaUtils
        .createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParam, topics)
        .map(_._2) // keep only the message value; the key is unused

    // Classic word count: split each message on commas, count token occurrences.
    kafkaDStream
      .flatMap(_.split(","))
      .map((_, 1))
      .reduceByKey(_ + _)
      .print()

    ssc.start()
    // Blocks until the context is stopped (externally or on error); no explicit
    // ssc.stop() is needed afterwards — the context is already stopped by then.
    ssc.awaitTermination()
  }

}
