package com.sub.spark.sql.source;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Reads data from Hive via a Hive-enabled {@link org.apache.spark.sql.SparkSession}.
 *
 * @author Submerge
 * @since 2025/5/24 19:58
 * @version 1.0
 */
public class HiveSource {

    /**
     * Entry point: opens a Hive-enabled Spark session and runs a few example
     * queries (list databases, list tables, select from a result table).
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {

        // Identify as the "hadoop" user for HDFS/Hive permission checks.
        // NOTE(review): Hadoop's UserGroupInformation honors the
        // HADOOP_USER_NAME system property when no Kerberos login is present —
        // confirm against the cluster's auth setup.
        System.setProperty("HADOOP_USER_NAME", "hadoop");

        // SparkSession implements Closeable (close() delegates to stop()), so
        // try-with-resources guarantees the session and its SparkContext are
        // shut down even when a query throws — the original only stopped the
        // session on the happy path.
        try (SparkSession sparkSession = SparkSession.builder()
                .appName("sub-spark-hive")
                .master("local[2]")      // local mode, 2 worker threads
                .enableHiveSupport()     // connect to the Hive metastore
                .getOrCreate()) {

            // List all databases visible through the Hive metastore.
            // Trailing semicolons are omitted: SparkSession.sql() parses a
            // single statement and many Spark versions reject a trailing ';'.
            Dataset<Row> dbs = sparkSession.sql("show databases");
            dbs.show();

            // Switch to the target database and list its tables.
            sparkSession.sql("use sub_case");
            sparkSession.sql("show tables").show();

            // Query the precomputed top-10 message count table and print it.
            Dataset<Row> result =
                    sparkSession.sql("select * from sub_case.index_msg_drtop10_count");
            result.show();
        }
    }
}
