package com.dtkavin.sparkstreaming

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Milliseconds, StreamingContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/19 0019
  * Time : 14:28
  * Description : WordCount that processes each RDD micro-batch independently
  *               (per-batch counts only; no state is carried across batches).
  */
// NOTE(review): this class is intentionally empty — all logic lives in the
// companion object below. It exists only as a conventional companion-class stub.
class WordCountBatch {

}

object WordCountBatch {

  /**
    * Entry point: starts a Spark Streaming job that word-counts new text files
    * appearing in a monitored directory, printing per-batch counts.
    *
    * Each 2-second micro-batch is counted independently; no state is kept
    * across batches (counts are NOT cumulative).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Quiet the noisy Spark/log4j output (project-local helper).
    SelfLogging.getSelfLoggingLevel("WARN")

    // In local mode the thread count must be >= 2: one thread is taken by the
    // receiver, so at least one more is needed for batch processing.
    val conf = new SparkConf().setAppName("WordCountBatch").setMaster("local[3]")

    // 2000 ms batch interval: a new micro-batch RDD is produced every 2 seconds.
    val ssc = new StreamingContext(conf, Milliseconds(2000))

    // Checkpoint directory (Windows path — adjust for other environments).
    ssc.checkpoint("D:/data/checkpoint/wordcount")

    // textFileStream monitors the directory for NEW files only; existing files
    // are ignored. Split lines on single spaces and count words per batch.
    val wordCounts = ssc.textFileStream("D:\\data\\input-index")
      .flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)
    //    val res = ssc.textFileStream("D:/data/input-index/a.txt").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)

    // Print the first elements of each batch's result to stdout.
    wordCounts.print()

    ssc.start()
    // Block the main thread until the streaming context is stopped externally.
    ssc.awaitTermination()
  }
}
