package com.arnold.test.demos.wordcounts

import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by arnold.zhu on 2017/7/17.
  */
object WordCounts {

  /**
    * Spark Streaming word-count demo: watches a directory for newly created
    * text files and prints per-batch word counts every 15 seconds.
    *
    * Runs until externally terminated (awaitTermination blocks forever).
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("Spark01Learn")
    // 15-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(15))

    // textFileStream monitors a DIRECTORY for files created after the stream
    // starts; pointing it at a single file yields no data. Watch the parent
    // directory instead and drop new files into it while the job runs.
    val lines = ssc.textFileStream("/Users/arnold.zhu/files")

    // Split each line on runs of whitespace to get words.
    // (The original split("") broke lines into individual characters.)
    val words = lines.flatMap(_.split("\\s+"))

    // Pair each word with 1, then sum counts per key within each batch.
    val wordCounts = words.map(word => (word, 1)).reduceByKey(_ + _)
    wordCounts.print()

    // Start the streaming computation and block until it is stopped.
    ssc.start()
    ssc.awaitTermination()
  }

}
