package com.ibm.cps.spark.streaming

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.api.java.StorageLevels
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Smoke test: reads two Kafka test topics via Spark Streaming's
 * ZooKeeper-based receiver API and prints the received batches.
 *
 * Created by telekinesis on 4/27/15.
 */
object KafkaReadTest {

  /**
   * Entry point: consumes the two test topics through the receiver-based
   * (ZooKeeper) Kafka API and prints each 10-second batch to stdout.
   *
   * @param args optional; args(0) overrides the default ZooKeeper connect
   *             string (defaults to "9.186.88.253:2181", preserving the
   *             original hard-coded behavior when no args are given).
   */
  def main(args: Array[String]): Unit = {
    val zkConnect = args.headOption.getOrElse("9.186.88.253:2181")

    val conf = new SparkConf().setAppName("KafkaReadTest")
    val ssc = new StreamingContext(conf, Seconds(10))
    // The receiver-based Kafka stream requires checkpointing; "." keeps the
    // checkpoint data in the working directory for this local smoke test.
    ssc.checkpoint(".")

    val kafkaParams: Map[String, String] = Map(
      "zookeeper.connect" -> zkConnect,
      "group.id" -> "groupId",
      "zookeeper.session.timeout.ms" -> "400",
      "zookeeper.sync.time.ms" -> "200",
      "auto.commit.interval.ms" -> "1000"
    )

    // Topic name -> number of receiver threads consuming that topic.
    val topicMap: Map[String, Int] = Map(
      "TEST_SPARK_DATA" -> 1,
      "TEST_SPARK_METADATA" -> 1
    )

    val dataDStream = KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, topicMap, StorageLevels.MEMORY_AND_DISK)
    dataDStream.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
