package sparkstreaming.nineteenthday2.lesson2

import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.ReceiverInputDStream

object OutputTest {

  /**
   * Windowed word count over a TCP text stream, demonstrating the
   * `foreachRDD` / `foreachPartition` output pattern.
   *
   * Lines are comma-separated words; counts are aggregated over a 6-second
   * window sliding every 4 seconds (both multiples of the 2-second batch
   * interval, as Spark Streaming requires).
   *
   * @param args optional overrides: args(0) = source host (default "hadoop01"),
   *             args(1) = source port (default 9999)
   */
  def main(args: Array[String]): Unit = {
    // Generalization: endpoint may be supplied on the command line; the
    // defaults preserve the original hard-coded "hadoop01":9999.
    val host = if (args.length > 0) args(0) else "hadoop01"
    val port = if (args.length > 1) args(1).toInt else 9999

    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName) // no interpolation needed — already a String
      .setMaster("local[4]")                   // >= 2 threads: 1 for the receiver + workers
    val sc = new SparkContext(conf)
    val ssc: StreamingContext = new StreamingContext(sc, Seconds(2))

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    val wordcountDStream = lines
      .flatMap(_.split(","))
      .map((_, 1))
      .reduceByKeyAndWindow(
        (x: Int, y: Int) => x + y,
        Seconds(6),              // window length
        Seconds(4),              // slide interval
        new HashPartitioner(4))

    // foreachRDD runs on the driver each interval; foreachPartition/println
    // execute on the executors, so output appears in executor stdout
    // (the local JVM console under local[4]).
    wordcountDStream.foreachRDD { rdd =>
      rdd.foreachPartition(_.foreach(println))
    }

    ssc.start()
    // Blocks until the context is stopped (externally or by error).
    // No explicit ssc.stop() afterwards: awaitTermination only returns once
    // the context has already stopped, so the original trailing stop() was
    // a redundant no-op.
    ssc.awaitTermination()
  }

}
