package com.niit.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Date: 2025/5/19
 * Author: Ys
 * Description: Spark Streaming word count over a socket text stream.
 */
object SparkStreaming_wordcount {

  /**
   * Stateless streaming word count.
   *
   * Listens on localhost:9999 via a socket receiver and, every 3-second
   * batch interval, splits incoming lines on spaces, counts each word
   * within that batch only (no state carried across batches), and prints
   * the per-batch counts to stdout. Runs until externally terminated.
   */
  def main(args: Array[String]): Unit = {
    // Set up the streaming environment: local mode using all cores,
    // with a 3-second micro-batch (collection) interval.
    val conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming_wordcount")
    val streamingContext = new StreamingContext(conf, Seconds(3))
    // Keep console output readable — suppress everything below ERROR.
    streamingContext.sparkContext.setLogLevel("ERROR")

    // Source: a receiver-based DStream reading text lines from a TCP socket
    // on this machine, port 9999 (e.g. fed by `nc -lk 9999`).
    val lines: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 9999)

    // Transformation pipeline. For a batch containing
    // "spark hello spark hello" the stages produce:
    //   tokens: [spark, hello, spark, hello]
    //   pairs:  [(spark,1), (hello,1), (spark,1), (hello,1)]
    //   counts: [(spark,2), (hello,2)]
    val tokens: DStream[String] = lines.flatMap(line => line.split(" "))
    val pairs: DStream[(String, Int)] = tokens.map(word => (word, 1))
    val counts: DStream[(String, Int)] = pairs.reduceByKey((a, b) => a + b)

    // Output action: print each batch's counts to the console.
    counts.print()

    // Start the receiver and block the driver thread until the
    // streaming context is stopped.
    streamingContext.start()
    streamingContext.awaitTermination()
  }

}
