package spark.sql

import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Created by ibf on 2018/2/4.
 */
/**
 * Demonstrates the ROW_NUMBER() window function against a Hive table
 * (`class19.emp`): selects the top 3 salaries per department, first via a
 * registered temporary table and then via a nested sub-query.
 */
object Hive_Row_Number {
  def main(args: Array[String]): Unit = {
    // 1. Create the Spark context (local mode for this demo).
    val conf = new SparkConf()
      .setAppName("Hive_Row_Number")
      .setMaster("local[*]")
    val sc = SparkContext.getOrCreate(conf)
    // HiveContext is required because the query reads a Hive-managed table;
    // a plain SQLContext would suffice for non-Hive data sources.
    val sqlContext = new HiveContext(sc)

    // Ensure the context is stopped even if a query fails, so the driver
    // exits cleanly and releases its resources.
    try {
      /**
       * select deptno,sal,ename,row_number() over (partition by deptno order by sal desc) as rnk
       * from class19.emp
       */
      val df = sqlContext.sql("""select
                  deptno,sal,ename,row_number() over (partition by deptno order by sal desc) as rnk
                  from class19.emp""")

      df.show()

      // Approach 1: register the ranked result as a temp table and filter on
      // the precomputed rank. (registerTempTable is the Spark 1.x API; on
      // Spark 2+ this would be createOrReplaceTempView.)
      df.registerTempTable("tmp")
      println("=============注册成临时表的方法=================")
      sqlContext.sql("""select
                  deptno,sal,ename, rnk
                  from tmp
                   where rnk <= 3""").show()

      // Approach 2: compute the rank in a sub-query and filter in the outer
      // query — no temp-table registration needed.
      println("=============子查询嵌套的方法=================")
      sqlContext.sql(
        """
          |SELECT
          |deptno,sal,ename,rnk
          |FROM
          |(SELECT
          |deptno,sal,ename,
          |ROW_NUMBER() OVER (PARTITION BY deptno ORDER BY sal DESC) as rnk
          |FROM class19.emp) a
          |WHERE rnk <= 3
        """.stripMargin).show()
    } finally {
      // Release the SparkContext (never done in the original code).
      sc.stop()
    }
  }
}
