package com.shujia.spark.streaming

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Durations, StreamingContext}

object Demo6Reciver {

  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setAppName("receiverKafka")
      .setMaster("local[4]")

    /**
      * Create the streaming context with the batch interval, i.e. how often
      * a micro-batch is computed (here every 5 seconds).
      */
    val ssc: StreamingContext = new StreamingContext(conf, Durations.seconds(5))

    // topic name -> number of receiver threads consuming its partitions
    val topics = Map("test_topic2" -> 3)

    /**
      * Connect to Kafka in receiver mode (legacy high-level consumer):
      * offsets are tracked in ZooKeeper and auto-committed every 10 s.
      */
    val kafkaParams: Map[String, String] = Map[String, String](
      "zookeeper.connect" -> "master:2181,node1:2181,node2:2181",
      "group.id" -> "gIdass",
      // "smallest": start from the earliest available offset when the group
      // has no committed offset yet
      "auto.offset.reset" -> "smallest",
      "enable.auto.commit" -> "true",
      "auto.commit.interval.ms" -> "10000"
    )

    // Receiver-based DStream of (key, value) pairs. MEMORY_AND_DISK_2
    // replicates received blocks so a receiver failure does not lose
    // buffered (not-yet-processed) data.
    val kafkaDS: ReceiverInputDStream[(String, String)] =
      KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
        ssc,
        kafkaParams,
        topics,
        StorageLevel.MEMORY_AND_DISK_2
      )

    kafkaDS.print()

    ssc.start()
    // Blocks until the context is stopped externally or fails; a trailing
    // ssc.stop() after this call is redundant (the context is already
    // stopped by the time awaitTermination returns) and has been removed.
    ssc.awaitTermination()
  }

}
