package SQL

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}

object 函数 {

  /**
   * Demo entry point: shows (1) a user-defined function formatting a revenue
   * column, and (2) a window function ranking products by revenue within each
   * category.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[6]")
      .appName("Test")
      .getOrCreate()

    // Ensure the local Spark context is always released, even if a query fails.
    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Sample sales data: (product, category, revenue).
      val source = Seq(
        ("Thin", "Cell phone", 6000),
        ("Normal", "Tablet", 1500),
        ("Mini", "Tablet", 5500),
        ("Ultra thin", "Cell phone", 5000),
        ("Very thin", "Cell phone", 6000),
        ("Big", "Tablet", 2500),
        ("Bendable", "Cell phone", 3000),
        ("Foldable", "Cell phone", 3000),
        ("Pro", "Tablet", 4500),
        ("Pro2", "Tablet", 6500)
      ).toDF("product", "category", "revenue")

      // UDF demo: format revenue in thousands. Alias the result column so the
      // output header reads "revenue_k" instead of the generated "UDF(revenue)".
      val toStrUDF: UserDefinedFunction = udf(toStr _)
      source.select('product, 'category, toStrUDF('revenue) as "revenue_k")
        .show()

      // Window function demo: keep the top 2 products by revenue per category.
      val window = Window.partitionBy('category)
        .orderBy('revenue.desc)
      source.select('product, 'category, dense_rank() over window as "rank")
        .where('rank <= 2)
        .show()
    } finally {
      spark.stop()
    }
  }

  /**
   * Formats a revenue figure as a whole number of thousands, e.g. 6000 -> "6k".
   *
   * Note: integer division truncates toward zero, so 5500 -> "5k" and any
   * value below 1000 -> "0k".
   *
   * @param revenue revenue amount in currency units
   * @return the truncated thousands count suffixed with "k"
   */
  def toStr(revenue: Long): String = {
    (revenue / 1000) + "k"
  }
}
