package com.haozhen.homework

import org.apache.spark.sql.expressions.Window
/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/3  17:41
  */
object Work6 {

  /**
    * Reads the `work6` table from a local MySQL instance and, for every
    * distinct date appearing in either of its two date columns, prints the
    * date together with the next date in ascending order — computed twice:
    * once with the DataFrame DSL (window `max` over the current and next row)
    * and once with Spark SQL (`lead` window function).
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.Row
    val spark = SparkSession.builder().master("local[*]").appName("work6").getOrCreate()

    spark.sparkContext.setLogLevel("WARN")

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.functions._
    import spark.implicits._

    // NOTE(review): credentials are hard-coded; consider moving them to configuration.
    val properties = new java.util.Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "root")
    val work6: DataFrame = spark.read.jdbc("jdbc:mysql://127.0.0.1:3306/test1?serverTimezone=GMT", "work6", properties)

//    work6.show()

    // Bug fix: the session was never stopped; wrap the work so the local
    // Spark context is always shut down, even if an action throws.
    try {
      // ---- DSL approach ----

      // Frame covers the current row and the one after it; with an ascending
      // sort, `max` over this frame yields the next date (the last row keeps
      // its own date, mirroring the SQL `nvl(lead(...), date)` below).
      val w1 = Window.orderBy($"value".asc).rowsBetween(0, 1)

      // Columns 1 and 2 are assumed to be the start/end date columns of
      // `work6` — TODO confirm against the actual table schema.
      work6.map(v => v.getDate(1) + " " + v.getDate(2)).flatMap(str => str.split("\\s+"))
        .distinct()
        .sort($"value".asc) // explicit `.asc` instead of deprecated postfix syntax
        .withColumn("new", max("value") over w1)
        .show()

      // ---- SQL approach ----

      work6.createOrReplaceTempView("work6")

      // Distinct dates from both columns (UNION deduplicates), sorted ascending.
      spark.sql("select t1.date from (select startdate as date from work6 union select enddate as date from work6) as t1 order by 1").createOrReplaceTempView("t1")

      // `lead` fetches the following date; `nvl` falls back to the row's own
      // date for the final row, which has no successor.
      spark.sql(
        """
          | select date ,Nvl(lead(date) over( order by date),date) nextDate from t1
        """.stripMargin).show()
    } finally {
      spark.stop()
    }
  }

}
