package com.offcn.bigdata.spark.streaming.p1

import org.apache.spark.SparkConf
import org.apache.spark.api.java.StorageLevels
import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Spark Streaming job that loads data from an HDFS directory.
  * Monitors the directory for newly added files, runs a word count over
  * each micro-batch, and prints the result to stdout.
  */
object _02StreamingWithHDFSApp {

    /**
      * Entry point.
      *
      * @param args optional first argument: directory to monitor
      *             (defaults to "hdfs://ns1/data/spark/monitor";
      *             a local path such as "file:/E:/data/minitor/" also works for testing)
      */
    def main(args: Array[String]): Unit = {
        // Monitor directory is overridable from the command line; falls back to
        // the original hard-coded HDFS path, so existing invocations are unchanged.
        val monitorDir = args.headOption.getOrElse("hdfs://ns1/data/spark/monitor")

        // textFileStream uses no receiver, so a single local core is sufficient.
        val conf = new SparkConf().setMaster("local").setAppName("StreamingWithHDFS")
        // 2-second micro-batch interval.
        val ssc = new StreamingContext(conf, Seconds(2))

        // NOTE: only files created in the directory AFTER the job starts are picked up.
        val lines: DStream[String] = ssc.textFileStream(monitorDir)

        // Per-batch word count: split on any whitespace run, pair each word
        // with 1, then sum the counts per word.
        val wordCounts = lines.flatMap(_.split("\\s+")).map((_, 1)).reduceByKey(_ + _)

        wordCounts.print()

        ssc.start()
        ssc.awaitTermination()
    }
}
