package cn.lagou.sparkcore

import org.apache.spark.SparkContext
import org.apache.spark.sql.expressions.{Window, WindowSpec}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object homework4 {
  /**
   * Extracts the two date columns from each "id date1 date2" input line,
   * deduplicates the dates, and for every date computes the max of the
   * current and the following date (a 2-row sliding window) — once with
   * the DataFrame DSL and once with Spark SQL. Both paths should print
   * the same result.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getCanonicalName)
      .master("local[*]")
      .getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")

    // Spark SQL imports: implicits for $-columns / toDF, and the `max`
    // aggregate. NOTE: `import ...sql.functions` alone only imports the
    // object name, not its members — `max` must be imported explicitly.
    import spark.implicits._
    import org.apache.spark.sql.functions.max

    val df: DataFrame = List(
      "1 2019-03-04 2020-02-03",
      "2 2020-04-05 2020-08-04",
      "3 2019-10-09 2020-06-11").toDF()

    // DSL approach: window covers the current row and the next one,
    // ordered by date ascending.
    val w1: WindowSpec = Window.orderBy($"value".asc).rowsBetween(0, 1)
    println("------------------ DSL ----------------------")
    df.as[String]
      .map { str =>
        // Split once instead of twice; fields(0) is the id we drop.
        val fields = str.split("\\s+")
        fields(1) + " " + fields(2)
      }
      .flatMap(_.split("\\s+"))
      .distinct()
      .sort($"value".asc)
      .withColumn("new", max("value").over(w1))
      .show()

    // SQL approach: same window expressed in SQL over a temp view.
    println("----------------- SQL -------------------------")
    df.flatMap { case Row(line: String) =>
      // "\\s+" (not "//s+") splits on whitespace; tail drops the leading id.
      line.split("\\s+").tail
    }
      .distinct() // keep in sync with the DSL branch, which deduplicates
      .toDF("date")
      .createOrReplaceTempView("t1")

    spark.sql(
      """
        |select date,
        |       max(date) over (order by date rows between current row and 1 following) as new
        |  from t1
        |""".stripMargin
    ).show()

    spark.stop()
  }
}
