package day8

import org.apache.spark.sql.SparkSession

/**
 * Demo of Spark SQL window-frame specifications.
 *
 * Builds a small shop/quarter/sales dataset and computes, per shop, a
 * "remaining total": the sum of sales from the current row through the
 * last row of the partition (ordered by quarter), i.e.
 * `ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING`.
 */
object Test6_kaiChuang {
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder().master("local[*]").appName("yy").getOrCreate()
    import spark.implicits._

    // Columns: shop_name, jidu (quarter), sales
    val df2 = List(
      ("A","1",100),
      ("A","2",200),
      ("A","4",210),
      ("A","3",150),
      ("B","1",50),
      ("B","2",300),
      ("B","3",120),
      ("B","4",230)
    ).toDF("shop_name","jidu","sales")

    // createOrReplaceTempView is safe to re-run in the same session,
    // unlike createTempView which throws if the view already exists.
    df2.createOrReplaceTempView("t_sales")

    spark.sql(
      """
        |
        |-- Frame syntax: range/rows between x and y
        |--   unbounded preceding: first row of the window partition
        |--   unbounded following: last row of the window partition
        |--   current row:         the current row
        |-- Here: ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING
        |-- sums from the current row to the end of the partition.
        |
        |select shop_name,jidu,sales,
        |       sum(sales) over(partition by shop_name order by jidu rows between current row  and unbounded following) x1
        |from t_sales
        |
        |""".stripMargin).show()

    spark.stop()
  }
}
