package cn.bigdata.sparkcore.job

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/**
 * Demo job: given (id, startdate, enddate) rows, collect every distinct date,
 * then pair each date with the next one (via the `lead` window function),
 * using the date itself for the last row where `lead` is null.
 * The same computation is shown twice — DataFrame DSL and Spark SQL —
 * and both must produce the same result.
 */
object Case4 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      // `init` drops the trailing '$' from the Scala object's class name
      // (e.g. "Case4$" -> "Case4").
      .appName(this.getClass.getName.init)
      .master("local[*]").getOrCreate()

    try {
      val sc = spark.sparkContext

      // Sample data: (id, start date, end date)
      val lst = Array(
        (1, "2019-03-04", "2020-02-03"),
        (2, "2020-04-05", "2020-08-04"),
        (3, "2019-10-09", "2020-06-11")
      )

      val startdate = "startdate"
      val enddate = "enddate"
      val df = spark.createDataFrame(sc.makeRDD(lst)).toDF("id", startdate, enddate)

      // Union of all start and end dates.
      // NOTE: DataFrame.union keeps duplicates whereas SQL UNION deduplicates,
      // so distinct() is required to match the SQL query below when a date
      // appears in more than one column/row.
      val df2 = df.select(startdate).union(df.select(enddate)).distinct().orderBy(startdate)

      // Shift the following date onto each row with the lead() window function;
      // the last row gets null (no successor).
      val df3 = df2.select(
        expr(startdate),
        expr("lead(startdate) over(order by startdate) as leaddate")
      )

      // Replace the trailing null with the row's own date and print the result.
      println("DSL 运行结果")
      df3.select(
        expr(startdate).as("dslstartdate"),
        expr("case when leaddate is null then startdate else leaddate end as dslleaddate")
      ).show()

      df.createOrReplaceTempView("t1")

      // Same computation expressed in SQL (UNION already deduplicates here).
      println("SQL 运行结果")
      spark.sql(
        """
          |select startdate,case when leaddate is null then startdate else leaddate   end as leaddate
          |  from(
          |        select startdate, lead(startdate) over(order by startdate) leaddate
          |           from (
          |               select startdate from t1 union select enddate from t1
          |               ) order by startdate
          |  ) tmp
          |""".stripMargin).show()
    } finally {
      // Always release the session, even if a stage above throws.
      spark.stop()
    }
  }
}
