package com.xxxx.sqlTest;

import org.apache.spark.sql.SparkSession;

/**
 * @program: day0316
 * @description:
 * @author: CoreDao
 * @create: 2021-03-18 09:05
 **/

/**
 * Demo of a SQL window function (row_number) over a Hive table using Spark SQL.
 *
 * <p>The {@code sales} table contains: date, category, sales_sum. The query ranks
 * rows within each category by sales_sum in descending order.
 */
public class WindowFun {
    public static void main(String[] args) {
        SparkSession session = SparkSession.builder()
                .appName("window")
                .master("local")
                // Since Spark 2.0 HiveContext and SQLContext are merged into SparkSession;
                // this switch enables Hive support (metastore, HiveQL).
                .enableHiveSupport()
                .getOrCreate();

        try {
            // One-time setup: create database/table and load the sample data.
            // Kept commented out so repeated runs don't reload the data.
            /*session.sql("drop database if exists spark");
            session.sql("create database spark");
            session.sql("drop table if exists sales");
            session.sql("use spark");
            session.sql("create table if not exists sales (date string,category string, sales_sum int) " +
                    "row format delimited fields terminated by '\t'");
            session.sql("load data local inpath 'src/main/resources/data/sales.txt' into table sales");*/

            session.sql("use spark");

            // Single definition of the ranking query so explain() and show()
            // always run the exact same SQL.
            final String rankQuery =
                    "select date,category,sales_sum," +
                    "row_number() over (partition by category order by sales_sum desc) as rank " +
                    "from sales";

            // Inspect the physical plan, then run the query.
            session.sql(rankQuery).explain();
            session.sql(rankQuery).show();

            // Final result: top 3 rows per category.
            // (Fixed: original selected non-existent column 'sales_num'.)
            /*session.sql("select date,category,sales_sum,rank " +
                    "from ( " +
                    rankQuery +
                    ") t " +
                    "where t.rank <=3").show();*/
        } finally {
            // Always release the local Spark resources, even if a query fails.
            session.stop();
        }
    }
}
