package newStreaming

import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}

/**
 * Spark Streaming word-count job.
 *
 * Reads comma-separated text from a socket (node2:9999), counts words per
 * 5-second micro-batch, registers each batch as a temp table, selects it
 * back out via SQL, and writes the result to a batch-specific directory
 * under data/wordCount.
 *
 * DStream / RDD / DataFrame conversions demonstrated here:
 *   ds -> rdd        (foreachRDD / transform)
 *   rdd -> df        (toDF via sql.implicits)
 *   ds -> rdd -> df
 *   df -> rdd        (count.rdd)
 *   an RDD cannot be converted back into a DStream
 */
object transform {

  // Explicit main instead of `extends App`: the App trait's delayed
  // initialization is a known pitfall (top-level vals may not be
  // initialized when Spark serializes closures that capture them).
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    sparkConf.setAppName("transform")
    sparkConf.setMaster("local[4]")
    val sparkContext = new SparkContext(sparkConf)
    val sql = new SQLContext(sparkContext)

    import sql.implicits._

    // 5-second micro-batch interval.
    val ssc = new StreamingContext(sparkContext, Durations.seconds(5))
    val ds = ssc.socketTextStream("node2", 9999)
    // transform() looks like a DStream-level operation, but the function
    // runs against the underlying RDD of each batch:
    //  val countDS = ds.transform(rdd => {
    //    rdd.foreach(println)
    //    val countRdd = rdd.flatMap(_.split(",")).map((_, 1)).reduceByKey(_ + _)
    //    countRdd
    //  })
    //  countDS.print()

    // Use the two-argument foreachRDD overload to get the batch time.
    // saveAsTextFile fails (FileAlreadyExistsException) when the target
    // directory already exists, so writing every batch to the same fixed
    // "data/wordCount" path would kill the stream on the second non-empty
    // batch; key the output path by batch time instead.
    ds.foreachRDD((rdd, batchTime) => {
      if (!rdd.isEmpty()) {
        val countRDD = rdd.flatMap(_.split(",")).map((_, 1)).reduceByKey(_ + _)
        val countDF = countRDD.toDF("word", "count")
        // NOTE(review): registerTempTable is deprecated since Spark 2.0 in
        // favour of createOrReplaceTempView — confirm the Spark version in
        // use before switching.
        countDF.registerTempTable("wordCount")
        //    countDF.write.mode(SaveMode.Overwrite).jdbc()
        val count = sql.sql(
          """
            |select * from wordCount
            |""".stripMargin)

        count.rdd.saveAsTextFile(s"data/wordCount/${batchTime.milliseconds}")
      }
    })

    ssc.start()
    ssc.awaitTermination()
    // Only reached if the context is terminated externally.
    ssc.stop()
  }
}
