package com.test.cn.spark.core

import org.apache.spark.sql.SparkSession

/**
 * Small Spark SQL exercise: reads a whitespace-delimited CSV of
 * (startdate, enddate) rows and, for each date, pairs it with the next
 * date in order (or itself when it is the last one) via `lead(...) over`.
 *
 * Usage: the first program argument, when present, overrides the default
 * input path — backward-compatible with the original hard-coded path.
 */
object SqlQuestion {

  def main(args: Array[String]): Unit = {
    // Allow the data file to be passed on the command line; fall back to
    // the original hard-coded location so existing invocations still work.
    val inputPath: String =
      args.headOption.getOrElse("spark_scala_home_work/src/data/sql.txt")

    val spark: SparkSession = SparkSession.builder()
      // Scala objects have a trailing '$' in their class name; strip it
      // explicitly instead of chopping the last char with `.init`, which
      // would corrupt the name if the suffix were ever absent.
      .appName(this.getClass.getCanonicalName.stripSuffix("$"))
      .master("local[*]")
      .getOrCreate()
    import spark._

    // Register the CSV file as a temporary view. Note: option keys are
    // case-insensitive in Spark, but `inferSchema` is the documented form.
    sql(
      s"""
        |create or replace temporary view sql_info
        |using csv
        |options(path "$inputPath",
        |       header "true",
        |       inferSchema "true",
        |       delimiter " ")
        |
      """.stripMargin)

    // Flatten (startdate, enddate) into a single date column, then pair
    // each date with the following one; the last date (lead = NULL) is
    // paired with itself via nvl.
    sql(
      """
        |select
        |dd,nvl(lead(dd) over(partition by aa order by dd),dd) as dd_2
        |from
        |(select 1 as aa, startdate as dd  from sql_info
        |union all
        |select 1 as aa, enddate as dd from sql_info
        |) as tmp order by dd
      """.stripMargin).show()

    spark.stop()
  }

}
