package com.lagou.no6

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
  * Demo: given rows of (id, startDate, endDate), flatten every start/end date
  * into a single sorted column and pair each date with its successor, producing
  * a list of consecutive date intervals. Results are printed via `show`.
  */
object No6 {
    def main(args: Array[String]): Unit = {
        // Reduce log noise: only WARN and above from Spark's internal loggers.
        Logger.getLogger("org").setLevel(Level.WARN)

        // Local SparkSession named after this class.
        // (Dropped the redundant s"${...}" wrapper — the expression alone suffices.)
        val spark = SparkSession
                .builder()
                .appName(this.getClass.getCanonicalName)
                .master("local[*]")
                .getOrCreate()

        // Sample data: (id, startDate, endDate).
        val list = List((1,"2019-03-04","2020-02-03"),(2,"2020-04-05","2020-08-04"),(3,"2019-10-09","2020-06-11"))
        // Build a DataFrame with named columns and register it as temp view "t".
        spark.createDataFrame(list).toDF("id","startDate","endDate").createOrReplaceTempView("t")

        // Inner query: concat each row's start/end dates with ';', then
        // split + explode so every date becomes its own row; a constant id = 1
        // puts all rows into one window partition.
        // Outer query: lead() fetches the next date in ascending order as the
        // interval's end; nvl() falls back to the date itself for the last row,
        // which has no successor.
        spark.sql(
            """
              |select allDate as startDate
              |       ,nvl(lead(allDate) over(partition by id order by allDate),allDate) as endDate
              |from
              |(select 1 as id,
              |       explode(split(concat(startDate,';',endDate),';')) as allDate
              |from t) a
              |order by allDate
            """.stripMargin).show(false)

        // Release the Spark context and its resources.
        spark.close()
    }
}
