package cn.itcast.structured

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.streaming.{OutputMode, Trigger}

object Triggers {

  /**
   * Demonstrates Structured Streaming processing-time triggers: reads rows
   * from the built-in "rate" source and prints each 5-second micro-batch
   * to the console until the query is terminated.
   */
  def main(args: Array[String]): Unit = {
    // Build a local SparkSession using 6 worker threads.
    val session = SparkSession.builder()
      .appName("triggers")
      .master("local[6]")
      .getOrCreate()

    // Suppress INFO logs so the console sink output stays readable.
    session.sparkContext.setLogLevel("WARN")

    import session.implicits._

    // The "rate" source continuously emits rows of (timestamp, value).
    val rateStream = session.readStream
      .format("rate")
      .load()

    // No transformation applied — the stream is passed through unchanged.
    val passthrough = rateStream

    val query = passthrough.writeStream
      .format("console")                              // print every batch's rows
      .outputMode(OutputMode.Append())                // emit only newly arrived rows
      .trigger(Trigger.ProcessingTime("5 seconds"))   // cut a micro-batch every 5s
      .start()

    // Block the driver thread until the query stops or fails.
    query.awaitTermination()
  }

}
