package com.offcn.bigdata.spark.sql.p3

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
 * @Author: BigData-LGW
 * @ClassName: SparkWindow
 * @Date: 2020/12/10 22:40
 * @Description: Demonstrates Spark SQL window functions (row_number over partitions).
 * @Version: 1.0
 */
object SparkWindow {
    /**
     * Entry point. Reads a local JSON file into a temp view `person`, then runs
     * two Spark SQL queries demonstrating the row_number() window function:
     * first ranking every person by height within their province, then keeping
     * only the top two per province.
     *
     * @param args unused command-line arguments
     */
    def main(args: Array[String]): Unit = {
        Logger.getLogger("org.apache.spark").setLevel(Level.INFO)
        val spark = SparkSession.builder()
            .appName("SparkWindow")
            .master("local[*]")
            .getOrCreate()
        // Ensure the local SparkSession is released even if a query fails.
        try {
            val df = spark.read.json("file:/F:/people.json")
            df.createOrReplaceTempView("person")

            // Rank each person within their province by height (tallest gets 1).
            val rankedSql =
                """
                  |select
                  |name,
                  |height,
                  |province,
                  |row_number() over(partition by province order by height desc) rank
                  |from person
                  |""".stripMargin
            spark.sql(rankedSql).show()

            // Keep the top two per province (rank < 3). The window alias must be
            // filtered from a subquery because WHERE cannot reference a window
            // function computed in the same SELECT.
            val topPerProvinceSql =
                """
                  |select
                  |tmp.*
                  |from(
                  |select
                  |name,
                  |height,
                  | province,
                  |row_number() over(partition by province order by height desc) rank
                  |from person
                  |) tmp
                  |where tmp.rank < 3
                  |""".stripMargin
            spark.sql(topPerProvinceSql).show()
        } finally {
            spark.stop()
        }
    }
}
