package com.shujia.spark.streaming

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}

object Demo4DStreamRDD {
  /**
   * Demonstrates two ways of working with the RDDs underlying a DStream:
   *   1. `foreachRDD` — an output operation that hands each micro-batch RDD
   *      to user code (here: word count via the RDD API and via Spark SQL).
   *   2. `transform` — a transformation that maps each batch RDD to a new
   *      RDD, producing a new DStream.
   *
   * @param args optional overrides: args(0) = socket host (default "master"),
   *             args(1) = socket port (default 8888)
   */
  def main(args: Array[String]): Unit = {
    // Allow host/port to be supplied on the command line; the defaults
    // preserve the original hard-coded behaviour when no args are given.
    val host: String = if (args.length > 0) args(0) else "master"
    val port: Int = if (args.length > 1) args(1).toInt else 8888

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[3]")
      .appName("rdd")
      .getOrCreate()

    // Required for rdd.toDF(...) and the $"col" / SQL-function syntax below.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // One micro-batch every 5 seconds.
    val ssc = new StreamingContext(sparkContext = spark.sparkContext, batchDuration = Durations.seconds(5))

    val linesDS: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    // foreachRDD: output operation invoked once per micro-batch RDD.
    linesDS.foreachRDD(rdd => {
      // 1. Word count with the RDD API.
      // NOTE(review): RDD transformations are lazy — with the foreach action
      // commented out, this pipeline never actually executes. Uncomment the
      // foreach (or add another action) to materialise the result.
      rdd
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)
      //.foreach(println)

      // 2. The same word count using the Spark SQL API.
      val linesDF: DataFrame = rdd.toDF("line")
      linesDF
        // explode: turn one comma-separated line into one row per word
        .select(explode(split($"line", ",")) as "word")
        .groupBy($"word")
        .agg(count($"word") as "count")
        .show()
    })

    // transform: maps each batch RDD to a new RDD, yielding a new DStream.
    // NOTE(review): wordsDS has no output operation attached, so this
    // transformation is registered but never materialised.
    val wordsDS: DStream[String] = linesDS
      .transform(rdd => rdd.flatMap(_.split(",")))

    ssc.start()
    ssc.awaitTermination()
  }
}
