package com.spark.mooc.ch7_sparkstreaming.part04_conversionOperation

import com.spark.mooc.ch7_sparkstreaming.part02_basicInputSource.socket.StreamingExamples
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @description: Stateful streaming word count over a socket text source,
 *               using updateStateByKey to accumulate word counts across batches.
 * @time: 2020/11/30 20:46
 * @author: lhy
 */
object NetworkWordCountStateful {
    def main(args: Array[String]): Unit = {
        // Socket source location. Defaults preserve the original hard-coded
        // values; optionally overridable via command-line args (host, port).
        val host: String = if (args.length >= 1) args(0) else "192.168.21.101"
        val port: Int = if (args.length >= 2) args(1).toInt else 9999

        // State update function: merge the counts observed in the current
        // batch (`values`) with the running total carried in `state`.
        // Returning Some(...) keeps the key's state alive across batches.
        val updateFunction: (Seq[Int], Option[Int]) => Option[Int] =
            (values, state) => Some(state.getOrElse(0) + values.sum)

        StreamingExamples.setStreamingLogLevels()       // set log display level
        val conf: SparkConf = new SparkConf().setAppName("NetworkWordCountStateful").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(5))
        // Checkpointing is mandatory for updateStateByKey: the running state
        // must be persisted so it can be recovered across batches/restarts.
        ssc.checkpoint("output/streaming/checkpoint")
        val lines: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)
        val words: DStream[String] = lines.flatMap(_.split(" "))
        val wordDStream: DStream[(String, Int)] = words.map((_, 1))
        val stateDStream: DStream[(String, Int)] = wordDStream.updateStateByKey[Int](updateFunction)
        stateDStream.print()
        // Save the cumulative counts to text files (one directory per batch).
        stateDStream.saveAsTextFiles("output/streaming/output")
        ssc.start()
        ssc.awaitTermination()
    }
}
