package part04

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/*
 * 4. Table A has three columns: ID, startdate, enddate, and 3 rows:
 * 1 2019-03-04 2020-02-03
 * 2 2020-04-05 2020-08-04
 * 3 2019-10-09 2020-06-11
 * Write queries (both in SQL and in the DataFrame DSL) that transform the data above into:
 * 2019-03-04  2019-10-09
 * 2019-10-09  2020-02-03
 * 2020-02-03  2020-04-05
 * 2020-04-05  2020-06-11
 * 2020-06-11  2020-08-04
 * 2020-08-04  2020-08-04
 *
 * */
object SparkSql {
  /**
   * Builds the list of (date, nextDate) pairs from all distinct start/end dates.
   *
   * Approach: collect every distinct date into a single-column Dataset, then use a
   * window ordered by date spanning [current row, 1 following]; `max` over that frame
   * yields the next date, and the last date pairs with itself (its frame has one row).
   */
  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession
      .builder()
      .appName("SparkSql")
      .master("local[*]")
      .getOrCreate()

    // Sample rows: (ID, startdate, enddate).
    val data = List(("1", "2019-03-04", "2020-02-03"),
      ("2", "2020-04-05", "2020-08-04"),
      ("3", "2019-10-09", "2020-06-11"))

    val df: DataFrame = sparkSession.createDataFrame(data).toDF("ID", "startdate", "enddate")
    df.createOrReplaceTempView("A")

    import sparkSession.implicits._

    // All distinct dates (both start and end) as a one-column Dataset named "value".
    // Columns are accessed by name rather than by position to survive schema reordering.
    val ds: Dataset[String] = df
      .map(row => row.getAs[String]("startdate") + " " + row.getAs[String]("enddate"))
      .flatMap(_.split("\\s+"))
      .distinct()

    // DSL version. No pre-sort is needed: the window's own orderBy("value")
    // determines both the frame contents and the output ordering.
    ds.withColumn("newDate",
        max("value").over(Window.orderBy("value").rowsBetween(Window.currentRow, 1)))
      .show()

    // SQL version: the same window expression written in Spark SQL.
    ds.createOrReplaceTempView("B")
    sparkSession.sql(
      """
        |select value,max(value) over(order by value rows between current row and 1 following) as newDate
        |from B
        |""".stripMargin).show()

    sparkSession.close()
  }

}
