import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Created by Administrator on 2016/3/17.
  */
/**
  * Minimal Spark Streaming word-count over a Kafka topic, using the
  * receiver-based `KafkaUtils.createStream` API (old consumer, via ZooKeeper).
  *
  * Runs locally (`local[2]` — at least 2 cores are required so the receiver
  * and the processing tasks can run concurrently).
  */
object TestSparkStreaming {

  // @transient lazy: defer construction until first use inside the driver and
  // keep these out of any serialized closure — a StreamingContext held in an
  // object field is a classic NotSerializableException trap in Spark jobs.
  @transient lazy val conf: SparkConf = new SparkConf()
    .setAppName("TestSparkStreaming")
    .setMaster("local[2]")

  @transient lazy val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))

  def main(args: Array[String]): Unit = {
    testSparkStreaming()
  }

  /**
    * Starts a streaming word count: reads lines from Kafka, splits them on
    * whitespace, and prints per-batch word counts every 5 seconds.
    * Blocks forever in `awaitTermination`.
    *
    * @param zkQuorum   ZooKeeper connection string for the Kafka cluster
    * @param groupId    Kafka consumer group id
    * @param topicNames comma-separated list of topics to subscribe to
    */
  def testSparkStreaming(zkQuorum: String = "192.168.44.129:2181",
                         groupId: String = "group",
                         topicNames: String = "test"): Unit = {
    // Checkpointing is required by stateful/window operations and enables
    // driver recovery metadata.
    ssc.checkpoint("checkpoint")

    // Map of topic -> number of receiver threads consuming that topic.
    val topics = topicNames.split(",").map(topic => (topic, 1)).toMap

    // createStream yields (key, message) pairs; only the message body is needed.
    val lines = KafkaUtils.createStream(ssc, zkQuorum, groupId, topics).map(_._2)

    lines.flatMap(_.split("\\s")).map((_, 1L))
      .reduceByKey(_ + _).print()

    // Windowed variant (1-minute window, 5-second slide), kept for reference:
    //    lines.flatMap(_.split("\\s")).map((_, 1L))
    //      .reduceByKeyAndWindow(_ + _, _ - _, Minutes(1), Seconds(5), 2)

    ssc.start()
    ssc.awaitTermination()
  }

}
