package kk.learn.spark.work._6

import org.apache.spark.sql.SparkSession

/**
 * <p>
 * Spark SQL demo: loads a space-separated file of (id, startdate, enddate)
 * rows, merges all start and end dates into a single ordered column, and
 * pairs each date with the next one to produce contiguous date intervals.
 * </p>
 *
 * @author KK
 * @since 2021-03-05
 */
object DummySql {

  /**
   * Entry point: starts a local Spark session, registers the input file as a
   * temp view, and prints the list of adjacent date intervals derived from
   * the combined set of start/end dates.
   */
  def main(args: Array[String]): Unit = {
    val session = SparkSession
      .builder()
      .master("local[*]")
      .appName("DummySql")
      .getOrCreate()
    session.sparkContext.setLogLevel("warn")

    // Load the space-separated input and expose it to SQL as "table_a".
    val inputSchema = "id int, startdate string, enddate string"
    session.read
      .format("csv")
      .option("sep", " ")
      .schema(inputSchema)
      .load("code/data/dummy_sql_input.dat")
      .createOrReplaceTempView("table_a")

    // Union all start and end dates into one column, then pair each date
    // with its successor (lead over the date ordering) to build
    // [leftDate, rightDate] intervals; the last date pairs with itself.
    val intervalQuery =
      """
        |
        |select
        | date leftDate, nvl(lead(date) over (order by date), date) rightDate
        |from (
        |select startdate as date
        |from table_a
        |union
        |select enddate as date
        |from table_a
        |) tmp
        |order by leftDate
        |""".stripMargin

    session.sql(intervalQuery).show()

    session.close()
  }
}
