package sparkcore.day7.lesson07

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

/**
  * Created by Administrator on 2018/5/3.
  *
  * Spark 1.x -> SparkSQL entry points:
  *   SQLContext  — general SQL over DataFrames
  *   HiveContext — mainly used to access Hive data (superset of SQLContext)
  * Spark 2.x -> SparkSQL entry point:
  *   SparkSession (unifies SQLContext and HiveContext)
  *
  * Hive:
  *   A data warehouse — or, more precisely, a data-warehouse interface
  *   (metadata/catalog plus a SQL layer over HDFS storage).
  * SparkSQL:
  *   SparkSQL's warehouse capabilities are implemented by delegating to Hive
  *   (it reuses the Hive metastore and table definitions).
  */
object HiveTest {

  /**
    * Entry point: runs two SQL queries against the Hive table `aura.user`
    * through a Spark 1.x [[HiveContext]] and prints the results.
    *
    * Queries:
    *  1. A total row count of `aura.user`.
    *  2. The top-2 earners per department (`dep`), ranked by `salary`
    *     descending via the ROW_NUMBER window function.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("test")
    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always stopped, even if a query throws —
    // otherwise executors, the UI port, and temp directories leak.
    try {
      val sqlContext = new HiveContext(sc)
      // Simple aggregate: total number of rows in the Hive table.
      sqlContext.sql("select count(*) from aura.user").show()

      // Window query: keep only the two highest-paid rows per department.
      val sql =
        """
           SELECT id,username,salary,bon,dep FROM (SELECT id,username,salary,bon,dep,ROW_NUMBER() OVER (PARTITION BY dep ORDER BY salary desc) rank FROM aura.user) tmp WHERE tmp.rank <= 2
        """
      sqlContext.sql(sql).show()
    } finally {
      sc.stop()
    }
  }

}
