package org.example.SparkStreamCount

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf}

object MyWordCount {

  /** Streaming word count over a TCP socket text stream.
    *
    * Connects to `host:port`, splits each received line on spaces, counts
    * word occurrences within each 5-second batch, prints the counts and
    * saves each batch as text files under the given output prefix.
    *
    * Usage: MyWordCount [host] [port] [outputPrefix]
    * All arguments are optional; the defaults preserve the previously
    * hard-coded values, so existing invocations behave unchanged.
    */
  def main(args: Array[String]): Unit = {
    println("进入函数统计")

    // Connection/output settings, overridable from the command line.
    val host         = if (args.length > 0) args(0) else "192.168.60.128"
    val port         = if (args.length > 1) args(1).toInt else 9000
    val outputPrefix = if (args.length > 2) args(2) else "D:\\spark\\"

    val conf = new SparkConf().setAppName("MyWordCount").setMaster("local[*]")
    // 5-second micro-batch interval.
    val ssc = new StreamingContext(conf, Seconds(5))

    // Create the socket stream. MEMORY_AND_DISK_SER lets received blocks
    // spill to disk (serialized) when executor memory is tight.
    val mySocket: ReceiverInputDStream[String] =
      ssc.socketTextStream(host, port, StorageLevel.MEMORY_AND_DISK_SER)

    // Tokenize each line on single spaces and count words per batch.
    val wordCount: DStream[(String, Int)] =
      mySocket.flatMap(_.split(" ")).map(_ -> 1).reduceByKey(_ + _)

    wordCount.print()

    // Save each batch's counts as text files under the output prefix.
    // NOTE(fix): the previous code ran `rdd.context.union(rdd, rdd)` before
    // saving, which unioned the RDD with itself and doubled every record;
    // the counts are now saved directly.
    wordCount.saveAsTextFiles(outputPrefix)

    ssc.start()
    ssc.awaitTermination()
  }

}
