package com.xiaohu.streaming

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}

object Demo7SaveFile {
  /**
   * Demo: bridging DStream, RDD and DataFrame APIs, then persisting each
   * micro-batch to disk with saveAsTextFiles.
   *
   * Flow per 5-second batch:
   *   socket text stream -> transform(rdd) -> RDD -> DataFrame -> SQL word count
   *   -> back to RDD[(word, count)] -> text files on disk.
   */
  def main(args: Array[String]): Unit = {
    // DataFrame-style entry point.
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("rdd与DStream的关系")
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()
    import org.apache.spark.sql.functions._
    import sparkSession.implicits._

    // RDD-style entry point (reuse the session's SparkContext).
    val sparkContext: SparkContext = sparkSession.sparkContext

    // DStream-style entry point: 5-second micro-batches.
    val streamingContext = new StreamingContext(sparkContext, Durations.seconds(5))
    val infoDS: ReceiverInputDStream[String] = streamingContext.socketTextStream("master", 10086)

    val resDS: DStream[(String, Int)] = infoDS.transform((rdd: RDD[String]) => {
      // Option A: process the RDD directly and return a new RDD.
      //      val resRDD: RDD[(String, Int)] = rdd.flatMap(_.split(" "))
      //        .map((_, 1))
      //        .reduceByKey(_ + _)
      //      resRDD

      // Option B: convert the batch RDD to a DataFrame and run SQL on it.
      // RDD and DataFrame are freely convertible within a transform.
      val df1: DataFrame = rdd.toDF.select($"value" as "info")
      df1.createOrReplaceTempView("words")
      val resDF: DataFrame = sparkSession.sql(
        """
          |select
          |t1.wds as word,
          |count(1) as counts
          |from
          |(
          |select
          |explode(split(info,' ')) as  wds
          |from words) t1
          |group by t1.wds
          |""".stripMargin)

      // BUG FIX: `count(1)` yields a Catalyst bigint (java.lang.Long), so the
      // second Row field must be read as Long — getAs[Int] would throw
      // ClassCastException at runtime. Convert to Int afterwards to keep the
      // DStream[(String, Int)] element type unchanged.
      val resRDD: RDD[(String, Int)] =
        resDF.rdd.map((row: Row) => (row.getAs[String](0), row.getAs[Long](1).toInt))
      resRDD
    })

//    resDS.print()
    // Persist results to disk.
    // saveAsTextFiles only lets us choose the directory prefix and file suffix;
    // every batch creates a new sub-directory containing that batch's output files.
    resDS.saveAsTextFiles("spark/data/streamout/stream","txt")

    streamingContext.start()
    streamingContext.awaitTermination()
    streamingContext.stop()

  }
}
