package io.sqrtqiezi.spark.streaming

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

object StreamingFiles {

  /**
   * Structured Streaming practice: reads activity JSON files as a file
   * stream, counts events per 10-minute event-time window, and periodically
   * prints the accumulated result from the in-memory sink.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("files streaming practise")
      .getOrCreate()

    // Keep shuffle partitions small for a local demo; the default of 200
    // creates needless tiny tasks for the windowed aggregation.
    spark.conf.set("spark.sql.shuffle.partitions", 5)

    // Streaming file sources require an explicit schema, so infer it once
    // with a static batch read and reuse it below.
    val static = spark.read.json("data/activity-data/")
    static.printSchema()

    val streaming = spark.readStream
      .schema(static.schema)
      // Simulate a slow stream: ingest one file per micro-batch.
      .option("maxFilesPerTrigger", 1)
      .json("data/activity-data")

    // Creation_Time is nanoseconds since the epoch; scale to seconds and
    // cast to a proper timestamp column for event-time windowing.
    val withEventTime = streaming.selectExpr(
      "*",
      "cast(cast(Creation_Time as double)/1000000000 as timestamp) as event_time")

    // NOTE: with outputMode("complete") the watermark does NOT evict state —
    // the full result table is retained. The watermark is kept here purely
    // for illustration of the API.
    val activityQuery = withEventTime
      .withWatermark("event_time", "5 hours")
      .groupBy(window(col("event_time"), "10 minutes"))
      .count()
      .writeStream
      .queryName("events_per_window")
      .format("memory")
      .outputMode("complete")
      .start()

    // Poll the in-memory table a few times while micro-batches arrive.
    // Early iterations may show an empty table before the first batch lands.
    for (_ <- 1 to 10) {
      spark.sql("select * from events_per_window")
        .show(truncate = false)
      Thread.sleep(1000)
    }

    // Stop the streaming query gracefully before tearing down the session.
    activityQuery.stop()
    spark.stop()
  }
}
