package com.hzh.SparkStreaming

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Durations, StreamingContext}

object Demo4DSToRDDAndDF {

  /**
   * Demo: dropping from a DStream down to the RDD/DataFrame level.
   *
   * Reads lines from a socket ("master":8888), and every 5-second batch:
   *   1. runs a plain RDD word count over the comma-separated words, and
   *   2. converts the same RDD to a DataFrame and runs the word count as SQL.
   */
  def main(args: Array[String]): Unit = {
    // Build the environment: one SparkSession shared by SQL and streaming,
    // plus a StreamingContext with a 5-second batch interval.
    val spark: SparkSession = SparkSession
      .builder()
      .config("spark.sql.shuffle.partitions", 1)
      .master("local[2]")
      .appName("Demo4DSToRDDAndDF")
      .getOrCreate()

    val sc: SparkContext = spark.sparkContext

    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // Read the socket stream: each batch interval the received lines are
    // wrapped into one RDD, which is what foreachRDD hands us below.
    val sscDS: ReceiverInputDStream[String] = ssc.socketTextStream("master", 8888)

    /*
     * A DStream is backed by RDDs: every batch interval the received data is
     * packaged into one RDD. Once we work at the RDD level, stateful DStream
     * operators (updateStateByKey, etc.) are no longer available.
     */
    import spark.implicits._

    sscDS.foreachRDD((rdd: RDD[String]) => {
      // Plain RDD code can be written here.
      // FIX: reduceByKey is a lazy transformation — without a terminal action
      // the word-count chain never executed. foreach(println) triggers it.
      rdd
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)
        .foreach(println)

      // The same RDD can be converted to a DataFrame and queried via SQL/DSL.
      val lineDF: DataFrame = rdd.toDF("line")
      lineDF.createOrReplaceTempView("lines")

      spark.sql(
        """
          |select word,count(1) as c from
          |(select explode(split(line,',')) as word
          |from lines
          |) as a
          |group by word
          |
          |""".stripMargin)
        .show()
    })

    ssc.start()
    // awaitTermination blocks until the job is stopped externally or fails;
    // stop() only runs after that, as a final cleanup.
    ssc.awaitTermination()
    ssc.stop()
  }
}
