package com.gizwits.kafkaConsumer

import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka._
import org.apache.spark.{SparkConf, SparkContext}
object KafkaWordCountConsumer {

  /**
   * Consumes messages from Kafka through the ZooKeeper-based receiver API and
   * prints two word counts every batch:
   *   1. a sliding-window count (10-minute window, 2-second slide, 2 reduce
   *      partitions) using the incremental reduce/inverse-reduce form, and
   *   2. a cumulative count across all batches via `updateStateByKey`.
   *
   * NOTE(review): the connection settings below are hard-coded test values;
   * restore the commented-out command-line parsing (or external config)
   * before any non-local use.
   */
  def main(args: Array[String]): Unit = {
    // val Array(zkQuorum, group, topics, numThreads) = args
    val zkQuorum = "s60:2181"   // ZooKeeper quorum used by the Kafka receiver
    val group = "1"             // Kafka consumer group id
    val topics = "test"         // comma-separated list of topics to subscribe
    val numThreads = 2          // receiver threads per topic

    val sparkConf = new SparkConf().setAppName("KafkaWordCount")
    val sc = new SparkContext(sparkConf)
    val ssc = new StreamingContext(sc, Seconds(1))
    // Checkpointing is mandatory both for the inverse-reduce windowed count
    // and for updateStateByKey. "." only works for local experiments; use a
    // fault-tolerant directory (e.g. HDFS) in production.
    ssc.checkpoint(".")

    /**
     *  jssc: JavaStreamingContext,
      zkQuorum: String,
      groupId: String,
      topics: JMap[String, JInt]
     */
    // Map each topic name to the number of receiver threads for it.
    val topicMap = topics.split(",").map((_, numThreads)).toMap
    val lines = KafkaUtils.createStream(ssc, zkQuorum, group, topicMap)

    // Each Kafka record arrives as (key, message); only the message payload
    // is tokenized and counted.
    val words = lines.map(_._2).flatMap(_.split(" "))
    val pairs = words.map(word => (word, 1))

    // Sliding-window count using the incremental (add new slide, subtract
    // expired slide) form, which requires checkpointing (enabled above).
    // NOTE(review): without the filterFunc overload, keys whose count drops
    // to 0 are retained in state forever; consider the variant of
    // reduceByKeyAndWindow that takes a filter function.
    val wordCounts = words.map(x => (x, 1L))
      .reduceByKeyAndWindow(_ + _, _ - _, Minutes(10), Seconds(2), 2)
    wordCounts.print()

    // Cumulative count: add this batch's occurrences (values) to the total
    // accumulated so far (state), starting from 0 for unseen words.
    val updateFunc: (Seq[Int], Option[Int]) => Option[Int] =
      (values, state) => {
        val currentCount = values.sum
        val previousCount = state.getOrElse(0)
        Some(currentCount + previousCount)
      }
    // Fix: the original defined updateFunc but never used it, duplicating the
    // identical lambda inline here. Pass the named function instead.
    val stateDstream = pairs.updateStateByKey(updateFunc)
    stateDstream.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
