package saic.spark_stream

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka.KafkaUtils

/**
 * @author ZhiLi
 */
object SessionStream {

  /**
   * Entry point: consumes the "water2" Kafka topic via the Spark Streaming
   * direct (receiverless) API in 2-second micro-batches and prints every
   * record of each non-empty batch to stdout.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Local context with a 2-second batch interval. The direct Kafka stream
    // has no receiver, so a single-core "local" master is sufficient here;
    // use "local[n]" (n >= 2) if a receiver-based source is ever added.
    val sparkConf = new SparkConf().setAppName("Test").setMaster("local")
    val ssc = new StreamingContext(sparkConf, Seconds(2))

    // Topics to subscribe to (Set literal is already immutable — the
    // original redundant .toSet is dropped).
    val topics = Set("water2")

    // Kafka connection parameters. The 0.8 direct stream reads
    // "metadata.broker.list" / "auto.offset.reset"; the consumer-style keys
    // below are ignored by this API but kept for forward compatibility with
    // the 0.10 integration.
    val kafkaParams = Map[String, String](
      "metadata.broker.list"    -> "broker:port,broker:port",
      "bootstrap.servers"       -> "broker:port,broker:port",
      "group.id"                -> "somegroup",
      "auto.commit.interval.ms" -> "1000",
      "key.deserializer"        -> "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer"      -> "org.apache.kafka.common.serialization.StringDeserializer",
      "auto.offset.reset"       -> "earliest",
      "enable.auto.commit"      -> "true"
    )

    // The 0.8-integration createDirectStream overload taking (ssc, params,
    // topics) requires explicit key/value types and Decoder classes; without
    // the type arguments the call does not compile.
    val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, topics)

    messages.foreachRDD { rdd =>
      // isEmpty short-circuits after the first found element, unlike
      // count() which scans every partition.
      if (!rdd.isEmpty()) {
        // The original rdd.map(x => print(x)) was a no-op: map is lazy and
        // its result was never acted upon. foreach is an action and actually
        // prints each (key, value) record.
        rdd.foreach(println)
        println(rdd.getClass)
      }
    }

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming job is stopped
  }
}