package com.doit.sparksql.day02

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}


/**
 * @DATE 2022/1/14/10:16
 * @Author MDK
 * @Version 2021.2.2
 * */
object SQL_TableAPI03 {
  // Silence verbose Spark/Hadoop logging so only errors reach the console.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Reads header-less shop CSV data (name, cdate, money) and numbers each
   * customer's rows by transaction date with the `row_number` window function,
   * printing the result to stdout.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("table-api02")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    // Bring in built-in SQL functions (row_number, ...) and the implicits
    // that enable the 'symbol column syntax used below.
    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Explicit schema for the header-less CSV file. Without it Spark would
    // auto-name the columns _c0/_c1/_c2 (all StringType).
    val schema: StructType = StructType(Seq(
      StructField("name", DataTypes.StringType),
      StructField("cdate", DataTypes.StringType),
      StructField("money", DataTypes.DoubleType)
    ))

    // FIX: the schema was previously built but never applied to the reader,
    // so the named-column select below failed with an unresolved-column
    // AnalysisException. Applying it also makes `money` a real Double.
    val df: DataFrame = spark.read.schema(schema).csv("data/shop/shop.txt")

    // Per-customer sequence number ordered by date; alias the window column
    // so the output header is "rn" rather than an auto-generated expression.
    df.select(
      'name, 'cdate, 'money,
      row_number().over(Window.partitionBy("name").orderBy("cdate")).as("rn")
    ).show()

    spark.close()
  }
}
