package com.xyz.module;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class HiveTestJava {

    /**
     * Entry point: opens a local-mode Spark session backed by a remote Hive
     * metastore and runs a "top-2 / bottom-2 rows per group" query against
     * table {@code t2}, printing the result to stdout.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Local-mode session pointed at a remote Hive metastore and HDFS warehouse.
        // NOTE(review): host/port are hard-coded — consider externalizing them
        // (e.g. spark-defaults.conf or args) before running outside this dev setup.
        SparkSession sparkSession = SparkSession
                .builder()
                .master("local[*]")
                .appName("java spark sql example")
                .config("hive.metastore.uris", "thrift://10.195.185.31:9083")
                .config("spark.sql.warehouse.dir", "hdfs://10.195.185.31:8020/user/hive/warehouse")
                .config("spark.sql.broadcastTimeout", "36000")
                .enableHiveSupport()
                .getOrCreate();

        try {
            // For each partition key `a`, keep the two smallest and two largest
            // rows by `b`: rn_min ranks ascending, rn_max ranks descending.
            String sql = "select * from (select *,row_number() over(partition by a order by b asc) rn_min,\n" +
                    "row_number() over(partition by a order by b desc) rn_max from t2 ) tmp\n" +
                    "where rn_min<=2 or rn_max <=2";

            Dataset<Row> sqlDF = sparkSession.sql(sql);
            sqlDF.show();
        } finally {
            // Always release the Spark context, even if the query fails.
            sparkSession.stop();
        }
    }

}
