from pyspark.sql import SparkSession, Window
from pyspark.sql.functions import *
from pyspark.sql.types import *

# Local single-JVM Spark session for this demo script.
spark = (
    SparkSession.builder
    .master('local')
    .appName('HelloSpark')
    .getOrCreate()
)

# Demo rows as (product, category, revenue) tuples; order matters because
# the df.show() calls below print rows in insertion order.
data = [
    ("Thin",       "Cell phone", 6000),
    ("Normal",     "Tablet",     1500),
    ("Mini",       "Tablet",     5500),
    ("Ultra thin", "Cell phone", 5000),
    ("Very thin",  "Cell phone", 6000),
    ("Big",        "Tablet",     2500),
    ("Bendable",   "Cell phone", 3000),
    ("Foldable",   "Cell phone", 3000),
    ("Pro",        "Tablet",     4500),
    ("Pro2",       "Tablet",     6500),
]

# Build the DataFrame directly with createDataFrame: this is the recommended
# API and avoids the unnecessary detour through the legacy RDD layer
# (sparkContext.parallelize(...).toDF(...)). Schema is inferred from the
# Python tuples; the list supplies the column names.
df = spark.createDataFrame(data, ['product', 'category', 'revenue'])
df.show()

# Rank products within each category by revenue, highest first; ties share
# a rank (dense_rank), then keep only the top two ranks per category.
revenue_window = Window.partitionBy('category').orderBy(col('revenue').desc())

ranked = df.select(
    'product',
    'category',
    'revenue',
    dense_rank().over(revenue_window).alias('rank'),
)
ranked.where('rank <= 2').show(truncate=False)

# Same top-2-per-category query, expressed in Spark SQL against a temp view.

df.createOrReplaceTempView('products')

top_per_category_sql = """
    SELECT product, category, revenue, rank
    FROM 
    (
    SELECT product, category, revenue, dense_rank() over (PARTITION BY category ORDER BY revenue DESC) AS rank
    FROM products
    ) tmp
    WHERE rank <= 2
"""
spark.sql(top_per_category_sql).show(truncate=False)
# NOTE(review): if this is the end of the script, consider spark.stop() here
# to release the session's resources -- omitted in case the file continues.