package study

import java.sql.{DriverManager, Timestamp}

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object MySink {

  /**
   * Spark Streaming windowed word count with three sinks: console, text files
   * on disk, and a MySQL table — one batch every 5 seconds over a 10-second
   * sliding window.
   */
  def main(args: Array[String]): Unit = {

    // Local-mode context; WARN keeps Spark's own logging out of the demo output.
    val conf: SparkConf = new SparkConf().setAppName("spark").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")
    val ssc: StreamingContext = new StreamingContext(sc, Seconds(5)) // one micro-batch every 5s

    // Raw text lines from a socket; each line holds whitespace-separated words.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("spark03", 9999)

    /**
     * Every 5s, aggregate word counts over the most recent 10s of data.
     */
    val sortDS: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))
      .reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(10), Seconds(5))
      .transform(
        rdd => {
          // Sort by count descending so take(3) really yields the top-3 words.
          // (The previous sortByKey(false) ordered by the *word*, not the count.)
          val sortRDD = rdd.sortBy(_._2, ascending = false)
          val top3: Array[(String, Int)] = sortRDD.take(3)
          println("top3 ===>")
          top3.foreach(println)
          sortRDD
        }
      )

    sortDS.foreachRDD((rdd, time) => {
      val milliseconds = time.milliseconds
      println("自定义输出===>")
      println("时间：" + milliseconds)
      println("============")
      // 1. To the console
      rdd.foreach(println)
      // 2. To disk, one output directory per batch (suffixed with batch time)
      rdd.saveAsTextFile("data\\output\\result-" + milliseconds)
      // 3. To MySQL — one connection per partition, inserts sent as a batch.
      /**
       * mysql> create table wordcount(
       * -> time timestamp,
       * -> word varchar(20),
       * -> count int);
       */
      rdd.foreachPartition(iter => {
        // NOTE(review): credentials are hard-coded; fine for a local demo but
        // move them to configuration before any real deployment.
        val connection = DriverManager.getConnection(
          "jdbc:mysql://localhost:3306/spark_streaming?characterEncoding=UTF-8", "root", "000000")
        val sql: String = "insert into wordcount (time, word, count) values (?, ?, ?);"
        val ps = connection.prepareStatement(sql)
        try {
          iter.foreach { case (word, count) =>
            ps.setTimestamp(1, new Timestamp(milliseconds))
            ps.setString(2, word)
            ps.setInt(3, count)
            ps.addBatch()
          }
          // executeBatch() flushes every queued row; the previous execute()
          // only ran the statement once with the last-bound parameters.
          ps.executeBatch()
        } finally {
          // Close the statement before its connection (the previous code closed
          // the connection first, then touched the already-dead statement), and
          // do it in finally so a failed insert cannot leak the connection.
          ps.close()
          connection.close()
        }
      })
    })

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming app is stopped externally
    ssc.stop(stopSparkContext = true, stopGracefully = true) // graceful shutdown
  }

}
