package org.shj.spark.sql

import org.apache.spark.sql.SparkSession
import org.shj.spark.util.Util
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.Row



object SqlGroupTopN {

  /**
   * Spark SQL "group top-N" example: for each `category` in the input file,
   * print the `topN` rows with the highest `soldNum`, using a
   * `row_number()` window function over a temp view.
   *
   * Input file format: whitespace-separated `category band soldNum` per line.
   *
   * @param args optional overrides: args(0) = input file name
   *             (default "topNGrp.txt"), args(1) = top-N (default 2).
   *             Defaults preserve the original hard-coded behaviour.
   */
  def main(args: Array[String]): Unit = {

    val inputFile = if (args.length > 0) args(0) else "topNGrp.txt"
    val topN      = if (args.length > 1) args(1).toInt else 2

    // enableHiveSupport() removed: only a session-local temp view is used,
    // so the built-in catalog suffices and no Hive/Derby metastore is
    // required (the original could fail on machines without Hive classes).
    val ss = SparkSession.builder()
      .appName("SqlGroupTopN")
      .master("local")
      .getOrCreate()

    // try/finally so the SparkContext is released even if parsing or the
    // query throws (the original leaked the session on any failure).
    try {
      val rowRdd = ss.sparkContext
        .textFile(Util.fullPath(inputFile))
        .map(_.split("\\s+"))
        .filter(_.length >= 3) // skip blank or short/malformed lines
        .flatMap { arr =>
          // Fully-qualified Try avoids a new import; drops rows whose
          // soldNum column is not a valid integer instead of crashing.
          scala.util.Try(arr(2).toInt).toOption.map(n => Row(arr(0), arr(1), n))
        }

      val schema = StructType(Array(
        StructField("category", StringType),
        StructField("band", StringType),
        StructField("soldNum", IntegerType)))

      val df = ss.createDataFrame(rowRdd, schema)
      df.createOrReplaceTempView("sell")

      // row_number() (not rank()) guarantees exactly topN rows per
      // category even when soldNum values tie. topN is an Int, so the
      // interpolation cannot inject arbitrary SQL.
      val sql =
        s"""select tmp.category, tmp.band, tmp.soldNum
           |from (select category, band, soldNum,
           |             row_number() over (partition by category order by soldNum desc) rn
           |      from sell) tmp
           |where tmp.rn <= $topN""".stripMargin

      ss.sql(sql).show()
    } finally {
      ss.stop()
    }
  }
}