package cn.lagou.sparksql

import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

object DateTrans {
  def main(args: Array[String]): Unit = {

    // Build a local SparkSession; the app name is derived from this object's class name.
    val spark = SparkSession
      .builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // Sample rows: (id, start date, end date).
    val records = List(
      (1, "2019-03-04", "2020-02-03"),
      (2, "2020-04-05", "2020-08-04"),
      (3, "2019-10-09", "2020-06-11")
    )

    // Load the collection into a DataFrame with named columns.
    val source: DataFrame = spark.createDataFrame(records).toDF("id", "startdate", "enddate")

    // Stack the startdate and enddate columns into one single-column dataset
    // holding every date value (note: DataFrame.union keeps duplicates).
    val allDates: Dataset[Row] = source.select("startdate").union(source.select("enddate"))
    allDates.createOrReplaceTempView("t1")

    // Order the dates ascending and pair each with the next date via lead();
    // for the last row, where lead() yields null, nvl falls back to the row's
    // own startdate.
    spark.sql(
      """
        |select startdate, nvl(lead(startdate) over(order by startdate),startdate) enddate from t1
        |""".stripMargin).show()

    spark.close()
  }
}
