# coding:utf8
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, IntegerType

if __name__ == '__main__':
    # 0. Build the SparkSession execution-environment entry point.
    #    shuffle.partitions is lowered to 2 because this runs on tiny local data.
    spark = SparkSession.builder.\
        appName("test").\
        master("local[*]").\
        config("spark.sql.shuffle.partitions", 2).\
        getOrCreate()
    sc = spark.sparkContext

    # Sample (name, class, score) rows used by all the demo queries below.
    rdd = sc.parallelize([
        ("张三", 'class_1', 99),
        ("王五", 'class_2', 35),
        ("王三", 'class_3', 57)
         ])
    # Explicit schema so toDF yields typed columns (score as int, not inferred).
    schema = StructType().add("name", StringType()).\
        add("class", StringType()).\
        add("score", IntegerType())
    df = rdd.toDF(schema)

    # Register the DataFrame as a temp view so it can be queried with SQL.
    df.createTempView("stu")

    # 1. Demo of an aggregate window function: AVG over the whole table,
    #    appended to every row.
    spark.sql("""
        SELECT *, AVG(score) OVER() as avg_score from stu
        """).show()

    # 2. Demo of ranking window functions:
    #    ROW_NUMBER OVER, DENSE_RANK OVER (per class), and RANK OVER.
    spark.sql("""
        SELECT *, ROW_NUMBER() OVER(ORDER BY score DESC) As row_number_rank, 
        DENSE_RANK() OVER(PARTITION BY class ORDER BY score DESC) As dense_rank, 
        RANK() OVER(ORDER BY score) AS rank 
        FROM stu
        """).show()

    # 3. Demo of NTILE: distribute rows into 6 buckets by descending score.
    spark.sql("""
    SELECT *, NTILE(6) OVER(ORDER BY score DESC) FROM stu
    """).show()

    # Release cluster/session resources on exit (was missing in the original).
    spark.stop()
