package iceberg.spark;

import org.apache.spark.sql.SparkSession;

/**
 * Utility for building a local {@link SparkSession} preconfigured with two
 * Iceberg catalogs: a Hive-metastore-backed catalog ({@code hive_prod}) and a
 * filesystem-based Hadoop catalog ({@code hadoop_prod}).
 */
public final class MySparkIcebergBuilder {

    /** Utility class — not meant to be instantiated. */
    private MySparkIcebergBuilder() {
    }

    /**
     * Returns a {@link SparkSession} running in local mode with Iceberg SQL
     * extensions enabled and both catalogs registered.
     *
     * <p>{@code SparkSession.builder().getOrCreate()} reuses an existing active
     * session if one is present, so repeated calls are cheap.
     *
     * @return the shared (or newly created) configured {@code SparkSession}
     */
    public static SparkSession getSparkSession() {
        return SparkSession.builder().master("local").appName("SparkOperateIceberg")
                // Hive catalog — requires the Hive metastore service to be running.
                // NOTE(review): metastore/HDFS endpoints are hard-coded to cluster
                // hosts (chdp01/chdp02); consider externalizing to configuration.
                .config("spark.sql.catalog.hive_prod", "org.apache.iceberg.spark.SparkCatalog")
                .config("spark.sql.catalog.hive_prod.type", "hive")
                .config("spark.sql.catalog.hive_prod.uri", "thrift://chdp02:9083")
                .config("iceberg.engine.hive.enabled", "true")
                // Enables Iceberg DDL/DML SQL syntax (e.g. MERGE INTO, ALTER TABLE ... WRITE).
                .config("spark.sql.extensions", "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
                // Hadoop catalog — tables tracked directly in the HDFS warehouse path.
                .config("spark.sql.catalog.hadoop_prod", "org.apache.iceberg.spark.SparkCatalog")
                .config("spark.sql.catalog.hadoop_prod.type", "hadoop")
                .config("spark.sql.catalog.hadoop_prod.warehouse", "hdfs://chdp01:9000/iceberg/spark")
                .getOrCreate();
    }
}
