package com.gy.spark.sparksql.windowfun

import org.apache.spark.sql.SparkSession

/**
 * Demonstrates Spark SQL window functions over a Hive table.
 *
 * Loads a tab-delimited sales file into Hive table `spark.sales`
 * (columns: riqi = date, leibie = category, jine = amount), then uses
 * `row_number() over (partition by ... order by ...)` to keep the
 * top 3 rows by amount within each category.
 *
 * Requires a Hive metastore (enableHiveSupport) and the input file at
 * ./spark/input/sql/sales relative to the working directory.
 */
object RowNumberWindowFun {
  def main(args: Array[String]): Unit = {
    // SparkSession with Hive support replaces the legacy SparkContext + HiveContext pair.
    val spark = SparkSession
      .builder()
      .master("local")
      .appName(this.getClass.getSimpleName)
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use spark")
    spark.sql("drop table if exists sales")
    // "\t" in a plain string literal is an actual tab character, which Hive
    // accepts as the field delimiter just like the '\t' escape form.
    spark.sql("create table if not exists sales (riqi string,leibie string,jine Int) "
      + "row format delimited fields terminated by '\t'")
    spark.sql("load data local inpath './spark/input/sql/sales' overwrite into table sales")

    /**
     * Window-function syntax:
     * 【 row_number() over (partition by XXX order by XXX) 】
     * Rows are numbered 1..n per partition; filtering rank <= 3 yields
     * the top 3 amounts per category.
     */
    val result = spark.sql(
      """select riqi,leibie,jine
        |from (
        |  select riqi,leibie,jine,
        |  row_number() over (partition by leibie order by jine desc) rank
        |  from sales) t
        |where t.rank<=3""".stripMargin)
    result.show()
    spark.stop()
  }
}