package com.bigdata.hudi;

import lombok.extern.log4j.Log4j;
import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.catalog.Database;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalog.Table;

/**
 * Demonstrates Spark SQL catalog operations (listing catalogs, databases and
 * tables) against a remote Hive metastore, plus a sample query on a Hudi table.
 *
 * @author Cyber
 * <p> Created on 2025/3/3
 * @version 1.0
 */
@Log4j
public class SparkSqlOperations {
    public static void main(String[] args) throws AnalysisException {
        querySparkSql();
    }

    /**
     * Builds a local SparkSession wired to a remote Hive metastore, inspects the
     * catalog (databases, tables), and runs a sample query against the Hudi table
     * {@code myhudi.hudi_table}.
     *
     * @throws AnalysisException if a SQL statement references a missing catalog,
     *                           database or table
     */
    private static void querySparkSql() throws AnalysisException {
        /*
        pom.xml must add the spark-hive artifact for enableHiveSupport():
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.12</artifactId>
            <version>${spark.version}</version>
        </dependency>
         */
        // try-with-resources: the original leaked the session whenever a query threw
        // AnalysisException before reaching close(). SparkSession is Closeable.
        try (SparkSession sparkSession = SparkSession.builder()
                .appName("spark-sql")
                .master("local[2]")
                .config("hive.metastore.uris", "thrift://master:9083")
                .config("hive.metastore.warehouse.dir", "hdfs://master:9000/user/hive/warehouse")
                .enableHiveSupport()
                .getOrCreate()) {
            log.info("sparkSession.catalog().currentCatalog() = " + sparkSession.catalog().currentCatalog());
            log.info("sparkSession.catalog().currentDatabase() = " + sparkSession.catalog().currentDatabase());

            // Switch to the Spark session catalog, then the Hudi database.
            sparkSession.sql("use spark_catalog");
            sparkSession.sql("use myhudi");

            log.info("===============================listDatabases===============================");
            Dataset<Database> databaseDataset = sparkSession.catalog().listDatabases();
            databaseDataset.printSchema();
            databaseDataset.show(false);

            log.info("===============================listTables===============================");
            Dataset<Table> tableDataset = sparkSession.catalog().listTables();
            tableDataset.printSchema();
            tableDataset.show(false);
            // NOTE: Dataset#toString only renders the schema, not the rows.
            log.info("sparkSession.catalog().listTables() = " + tableDataset);
            log.info("sparkSession.catalog().listTables(\"default\") = " + sparkSession.catalog().listTables("default"));

            Dataset<Row> catalogSql = sparkSession.sql("show catalogs");
            catalogSql.show();
            catalogSql.printSchema();

            Dataset<Row> showDatabases = sparkSession.sql("show databases");
            showDatabases.show();
            showDatabases.printSchema();

            // Sample query on the Hudi table; show all rows without truncating columns.
            Dataset<Row> sql = sparkSession.sql("select * from myhudi.hudi_table order by id asc limit 50");
            sql.show(Integer.MAX_VALUE, false);
            sql.printSchema();
        }
    }
}
