from pyspark.sql import SparkSession, functions as F
from pyspark.sql.types import StructType, IntegerType, StringType

if __name__ == '__main__':
    # Build the SparkSession.
    # BUG FIX: the original passed "local[*]" to appName() — that is a master
    # URL, not an application name — and never set the master at all.
    #
    #   master("local[*]")  -> run locally using all available cores
    #   appName(...)        -> human-readable job name shown in the Spark UI
    #   spark.sql.shuffle.partitions -> number of partitions after a shuffle
    #   spark.sql.warehouse.dir      -> default location for managed tables
    #   hive.metastore.uris          -> address of the Hive metastore service
    #   enableHiveSupport()          -> enable Hive integration (metastore,
    #                                   HiveQL, Hive SerDes)
    spark = SparkSession.builder. \
        master("local[*]"). \
        appName("hive_bucket_count"). \
        config("spark.sql.shuffle.partitions", "4"). \
        config("spark.sql.warehouse.dir", "hdfs://node1:8020/user/hive/warehouse"). \
        config("hive.metastore.uris", "thrift://node1:9083"). \
        enableHiveSupport(). \
        getOrCreate()

    # Count the rows of the Hive table `test_bucket` and print the result.
    # BUG FIX: removed the trailing ';' — spark.sql() parses a single
    # statement and a trailing semicolon raises a ParseException on many
    # Spark versions.
    spark.sql("""
        select count(*) from test_bucket
    """).show()

    # Release cluster resources when done.
    spark.stop()