package org.niit.stream

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming_09 {

  /*
    A Spark Streaming job must end with at least one OUTPUT operation, e.g.:
        print
        saveAsTextFiles
        saveAsObjectFiles
        saveAsHadoopFiles
        foreachRDD : exposes each batch's underlying RDD for custom output
                     (e.g. writing to MySQL over the network).

    transform vs. foreachRDD: both give access to the DStream's underlying RDDs,
    but foreachRDD is an output (action) operation while transform is a
    transformation that produces a new DStream.
   */

  /**
   * Streaming word count over a socket text stream; per-batch counts are
   * collected to the driver and printed.
   *
   * @param args optional overrides: args(0) = host (default "localhost"),
   *             args(1) = port (default 9999). With no args the behavior is
   *             identical to the original hard-coded version.
   */
  def main(args: Array[String]): Unit = {

    // Allow host/port to be supplied on the command line; defaults preserve
    // the original hard-coded localhost:9999 behavior.
    val host: String = if (args.length > 0) args(0) else "localhost"
    val port: Int    = if (args.length > 1) args(1).toInt else 9999

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")
    // 3-second micro-batch interval.
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    ssc.sparkContext.setLogLevel("ERROR")

    // Receiver-based source: one line of text per socket line.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    // Classic word count within each batch: split on spaces, pair with 1,
    // then sum per key.
    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairs: DStream[(String, Int)] = words.map((_, 1))
    val counts: DStream[(String, Int)] = pairs.reduceByKey(_ + _)

    // Output operation. NOTE: collect() pulls the entire batch result to the
    // driver — fine for a small demo, unsafe for large batches.
    counts.foreachRDD { rdd =>
      rdd.collect().foreach(println)
    }

    ssc.start()            // start receiving and processing batches
    ssc.awaitTermination() // block the driver until the context is stopped
  }

}
