package io.wen.bd.s6m2.spark

import org.apache.spark.sql.SparkSession

/** One date-range record parsed from `date_range.dat`: an id plus its start/end dates
  * (kept as raw strings exactly as they appear in the file).
  * Marked `final`: case classes should not be extended by other case classes.
  */
final case class DateRange(id: String, startDate: String, endDate: String)

/** Local Spark demo job.
  *
  * Reads space-separated `id startDate endDate` records from a local file,
  * registers them as the `date_range` temp view, and runs a SQL query that
  * collapses all start/end dates into one sorted column paired with the next
  * date (`lead` over a global ordering, falling back to the row's own date on
  * the last row). Prints the result plus the analyzed and optimized plans.
  */
object SQLTransformer {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("spark session")
      .master("local[*]")
      .getOrCreate()

    // Ensure the session (and its local cluster resources) is always released,
    // even if parsing or the SQL job throws.
    try {
      val sc = spark.sparkContext
      sc.setLogLevel("WARN")
      import spark.implicits._

      // NOTE(review): hard-coded Windows path with a file:// scheme — assumes
      // a local dev machine; confirm before running elsewhere.
      val dateRangePath = "file:///D:\\workspace\\project\\bd-hw\\stage6-module2-spark-core-sql\\data\\date_range.dat"

      // Each line is expected to hold exactly three space-separated fields:
      // id, startDate, endDate. Malformed lines will fail fast on arr(1)/arr(2).
      val dateRangeDS = sc
        .textFile(dateRangePath)
        .map(line => line.split(" "))
        .map(arr => DateRange(arr(0), arr(1), arr(2)))
        .toDS()

      dateRangeDS.createOrReplaceTempView("date_range")

      // `tmp` unions (and de-duplicates, since UNION != UNION ALL) all start
      // and end dates into a single column; the outer select pairs each date
      // with the next one in ascending order. The unpartitioned window forces
      // all rows onto a single partition — acceptable for this demo-sized data.
      val df = spark.sql(
        """
          |with tmp as (
          |  select startDate dt
          |  from date_range
          |  union
          |  select endDate dt
          |  from date_range
          |)
          |select
          |  dt,
          |  nvl(lead(dt) over (order by dt), dt) nextdt
          |from tmp
        """.stripMargin)

      df.show()
      println(df.queryExecution)
      println("=================")
      println(df.queryExecution.optimizedPlan)
    } finally {
      // Previously missing: without stop() the local SparkSession (threads,
      // UI port, temp dirs) leaked after main returned or on failure.
      spark.stop()
    }
  }
}
