package com.spark.mooc.ch7_sparkstreaming.part01_DStream

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @description: Spark Streaming word-count demo that reads a text-file input
 *               stream and prints per-batch word counts.
 * @time: 2020/11/29 13:39
 * @author: lhy
 */
object TestDStream {
    def main(args: Array[String]): Unit = {
        /*
        Basic streaming input sources:
            1. file streams (real-time log capture)
            2. socket streams
            3. RDD queue streams
        This example demonstrates a file-stream word count.
         */
        // Local driver with two threads; one micro-batch every 10 seconds.
        val sparkConf: SparkConf = new SparkConf().setAppName("TestDStream").setMaster("local[2]")
        val streamingContext = new StreamingContext(sparkConf, Seconds(10))

        // (1) Input DStream: watch the directory for newly created text files.
        val fileLines: DStream[String] = streamingContext.textFileStream("input/streaming/logfile")

        // (2) Transformations: tokenize each line on spaces, then count
        //     occurrences of each word within the current batch.
        val tokenCounts: DStream[(String, Int)] =
            fileLines
                .flatMap(line => line.split(" "))
                .map(token => (token, 1))
                .reduceByKey((a, b) => a + b)
        tokenCounts.print()

        // (3) Start the streaming computation and block the driver until it
        //     terminates — it runs until stopped or an error occurs.
        streamingContext.start()
        streamingContext.awaitTermination()
    }
}
