import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.{Window, WindowSpec}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * ClassName: Homework6
 * Description: Extracts all distinct dates from "id startDate endDate" records,
 * sorts them ascending, and pairs each date with the following one using a
 * Spark SQL window function.
 * date: 2021/7/11 15:03
 *
 * @author Hesion
 * @version
 * @since JDK 1.8
 */
object Homework6 {

  /**
   * Entry point. Parses records of the form "id startDate endDate", strips the
   * leading id, collects all distinct dates, sorts them ascending, and prints
   * each date alongside the next one (a sliding pair per output row).
   */
  def main(args: Array[String]): Unit = {
    // Boilerplate: local SparkSession named after this class.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName).setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder()
      .config(conf)
      .getOrCreate()
    // Reduce log noise to warnings only.
    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._
    import org.apache.spark.sql.functions._

    val df = List("1 2019-03-04 2020-02-03", "2 2020-04-05 2020-08-04", "3 2019-10-09 2020-06-11").toDF()

    // Window covering the current row and the one directly after it, in
    // ascending order. NOTE: no partitionBy, so Spark funnels all rows through
    // a single partition — acceptable for this toy dataset.
    val w1: WindowSpec = Window.orderBy($"value".asc).rowsBetween(0, 1)

    df.as[String]
      // Drop the leading sequence number and emit both dates in a single pass
      // (the original split each line three times: twice in a map, once more
      // in a second flatMap).
      .flatMap(_.split("\\s+").drop(1))
      // Remove duplicate dates.
      .distinct()
      // Sort dates ascending.
      .sort($"value".asc)
      // Pair each date with the next: under ascending order, max over rows
      // [current, next] is the next value (the last row pairs with itself).
      .withColumn("new", max("value") over w1)
      .show()

    spark.close()
  }
}
