from pyspark.sql import SparkSession


class LocalSpark:
    """Factory for a local SparkSession with Hive support enabled."""

    @staticmethod
    def create_spark(app_name: str = 'test',
                     master: str = "local[*]",
                     extra_conf: "dict | None" = None) -> SparkSession:
        """Build (or reuse) a local Spark session.

        Parameters
        ----------
        app_name : str
            Spark application name. Defaults to ``'test'`` (the original
            hard-coded value).
        master : str
            Spark master URL. Defaults to ``"local[*]"`` (all local cores).
        extra_conf : dict | None
            Optional extra Spark/Hive configuration key-value pairs applied
            on top of the defaults (e.g. ``{"spark.sql.debug.maxToStringFields":
            200}``).

        Returns
        -------
        SparkSession
            The session returned by ``getOrCreate()`` — an existing active
            session is reused if one exists.
        """
        # Build the Spark session; dynamic Hive partitioning is enabled in
        # non-strict mode so inserts may use only dynamic partition columns.
        builder = SparkSession.builder \
            .appName(app_name) \
            .master(master) \
            .enableHiveSupport() \
            .config("hive.exec.dynamic.partition", "true") \
            .config("hive.exec.dynamic.partition.mode", "nonstrict")

        # Apply caller-supplied overrides last so they win over the defaults.
        if extra_conf:
            for key, value in extra_conf.items():
                builder = builder.config(key, value)
        return builder.getOrCreate()


# Module-level session and context, created eagerly on import.
# NOTE(review): importing this module starts a Spark session as a side
# effect — confirm all importers expect that, or move creation behind a
# function / `if __name__ == "__main__":` guard.
spark = LocalSpark.create_spark()
sc = spark.sparkContext
