package core.sql.多数据源;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

/**
 * Demo: connect Spark SQL to the Hive metastore and list the tables.
 *
 * <p>Runs locally ({@code local[*]}) with Hive support enabled; requires a
 * reachable Hive metastore configuration on the classpath (e.g. hive-site.xml).
 */
public class Spark05_HIVE {
    public static void main(String[] args) {
        // Hadoop user to impersonate for HDFS/Hive access; must be set
        // before the SparkSession (and its Hadoop client) is created.
        System.setProperty("HADOOP_USER_NAME", "test");

        // SparkSession is AutoCloseable: try-with-resources guarantees the
        // session is stopped on every exit path, including failures that the
        // original try/finally could miss if the builder chain itself threw.
        try (SparkSession sparkSession = SparkSession
                .builder()
                .enableHiveSupport() // read table metadata from the Hive metastore
                .master("local[*]")
                .appName("Spark05_HIVE")
                .getOrCreate()) {
            // List all tables in the current Hive database and print them.
            sparkSession.sql("show tables ").show();
        } catch (Exception e) {
            // Best-effort demo: report the failure and exit normally.
            e.printStackTrace(); // catch and print the exception
        }
    }
}
