import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object DataExchange {
  /**
   * Demo entry point: splits whitespace-delimited lines of the form
   * "<id> <date1> <date2>", keeps only the date tokens, and runs a
   * sliding-window max over them with Spark SQL.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("DateSort")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // Use the canonical upper-case level name: older Spark versions validate
    // the string against upper-case names and reject "warn" with an
    // IllegalArgumentException.
    sc.setLogLevel("WARN")

    try {
      // Build the input data: one "<id> <date1> <date2>" string per row.
      import spark.implicits._
      val df: DataFrame = List(
        "1 2019-03-04 2020-02-03",
        "2 2020-04-05 2020-08-04",
        "3 2019-10-09 2020-06-11"
      ).toDF()

      // Drop the leading id token and emit every remaining date token as its
      // own row, then register the result as a temp view for SQL access.
      df.flatMap {
        case Row(line: String) =>
          line.split("\\s+").tail
      }.toDF("date").createOrReplaceTempView("t1")

      spark.sql("select * from t1").show()

      // For each date (ascending), take the max over a window covering the
      // current row and the one following — i.e. pair each date with the
      // next-larger date (or itself, on the last row).
      spark.sql(
        """
          |select date, max(date) over (order by date rows between current row and 1 following) as date1
          |from t1
          |
          |""".stripMargin).show()
    } finally {
      // Always release local Spark resources, even if a query fails.
      spark.stop()
    }
  }
}
