package me.chendan.streaming

import kafka.serializer.StringDecoder
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.SparkConf


// Empty companion class; carries no state or behavior. Kept only so the
// compilation unit has a class/object pair — all logic lives in the object.
class ReadingFromKafka

/**
 * Streaming word count over a Kafka topic, using Spark Streaming's
 * receiver-based Kafka consumer (ZooKeeper-backed, Kafka 0.8 API).
 *
 * Fixes vs. original:
 *  - The topics map contained the key "bidding" twice ("bidding" -> 0 was
 *    silently overwritten by "bidding" -> 1). The map is topic -> number of
 *    receiver threads, so a single entry with one thread is what was meant.
 *  - Replaced the `App` trait with an explicit `main` to avoid the
 *    delayed-initialization pitfalls `App` has with Spark closures.
 */
object ReadingFromKafka {

  def main(args: Array[String]): Unit = {
    // Master is expected to be supplied externally (e.g. via spark-submit).
    val conf = new SparkConf().setAppName("KafkaDemo")
    val ssc = new StreamingContext(conf, Seconds(5))
    // Checkpoint dir is needed for receiver recovery; /tmp is fine for a demo
    // but should be durable storage (e.g. HDFS) in production.
    ssc.checkpoint("/tmp")

    // Args: (streaming context, ZooKeeper quorum, consumer group,
    //        topic -> receiver-thread count, storage level).
    val kafkaStream = KafkaUtils.createStream(ssc, "localhost:2181", "bidding-group",
      Map[String, Int]("bidding" -> 1), StorageLevel.DISK_ONLY)

    // Each record is a (key, message) pair; keep only the message payload.
    val lines = kafkaStream.map(_._2)
    val wordCounts = lines
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
    // Prints the first few counts of each batch to stdout.
    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
