package com.doit.day07

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.types.{DataTypes, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demonstrates Spark SQL window functions: attaching a per-city total of
 * order prices to every order row via `sum("price").over(window)`, i.e. the
 * DataFrame-API equivalent of `sum(price) over (partition by city)`.
 */
object Demo03Windows {
  def main(args: Array[String]): Unit = {

    // Local-mode session using all available cores; suitable for a demo only.
    val session = SparkSession.builder()
      .appName("test")
      .master("local[*]")
      .getOrCreate()

    import session.implicits._
    import org.apache.spark.sql.functions._

    // Explicit schema of data/orders/order.csv:
    // order id, price, city, category, user id (CSV has no header row).
    val structType = new StructType()
      .add("oid", DataTypes.StringType)
      .add("price", DataTypes.DoubleType)
      .add("city", DataTypes.StringType)
      .add("category", DataTypes.StringType)
      .add("id", DataTypes.StringType)
    // Load the orders
    val orderDF = session.read.schema(structType).csv("data/orders/order.csv")

    // Register a temp view so the SQL formulation below could also be run.
    orderDF.createTempView("tb_order")

    /*
        Equivalent pure-SQL formulation of the query below:
        session.sql(
          """
            |select
            |* ,
            |sum(price) over(partition by city)  sum_money
            |from
            |tb_order
            |""".stripMargin).show()
    */
    // Window spanning all rows that share the same city (no ordering/frame,
    // so the aggregate covers the entire partition).
    val window = Window.partitionBy("city")
    // FIX: was $"orice" — a column that does not exist in the schema, which
    // made this line throw AnalysisException ("cannot resolve 'orice'") at
    // runtime. The intended column, following the schema order, is "city".
    orderDF.select(orderDF("oid"), col("price"), $"city", 'category, 'id, sum("price").over(window)).show()

    // Release the session's resources before the JVM exits.
    session.stop()
  }

}
