package org.zjt.spark.kafkaTest

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Receives messages from Kafka and performs a sliding-window word count.
  *
  * Reads from topic "test" via the direct (receiver-less) Kafka API, splits each
  * message on spaces, and prints per-word counts over a 30-second window that
  * slides every 10 seconds.
  *
  * Note: written as an explicit `main` rather than `extends App` — the `App`
  * trait's `DelayedInit` initialization is a known pitfall with Spark, because
  * vals defined in the `App` body may be null when closures referencing them
  * are serialized to executors.
  *
  * @create 2017-05-17 11:24
  **/
object WindowReceiveKafka {

  def main(args: Array[String]): Unit = {
    val config = new SparkConf().setMaster("local[2]").setAppName("ReceiveKafka")
    // Batch interval: one micro-batch every 2 seconds.
    val ssc = new StreamingContext(config, Seconds(2))

    // Stateful operations (windowing with an inverse reduce function) require
    // a checkpoint directory to persist intermediate state.
    ssc.checkpoint("./checkpoint")

    val kafkaParams = Map[String, String]("metadata.broker.list" -> "centos:9092")

    KafkaUtils
      .createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, Set("test"))
      // Each stream element is (key, message); we only need the message payload.
      .flatMap(_._2.split(" "))
      .map((_, 1))
      // Window of 30s sliding every 10s. The inverse function (_ - _) lets Spark
      // update counts incrementally instead of recomputing the whole window.
      // NOTE(review): without a filterFunc, keys whose count drops to 0 remain
      // in the state with value 0 — consider the filterFunc overload if the
      // key space is unbounded.
      .reduceByKeyAndWindow(_ + _, _ - _, Seconds(30), Seconds(10))
      .print()

    ssc.start()
    // Blocks until the streaming context is stopped (e.g. by an exception or
    // an external shutdown); an explicit stop() afterwards would be redundant.
    ssc.awaitTermination()
  }

}
