from pyspark.sql import SparkSession


class HSpark:
    """Factory for Hive-enabled SparkSession objects used by this project."""

    # Cluster-specific defaults applied to every session built here.
    # NOTE(review): metastore/warehouse endpoints are environment-specific —
    # confirm they match the target cluster before reuse.
    _DEFAULT_CONF = {
        "hive.exec.dynamic.partition": "true",
        "hive.exec.dynamic.partition.mode": "nonstrict",
        "hive.metastore.uris": "thrift://11.50.138.179:9083,thrift://11.50.138.181:9083",
        "spark.sql.warehouse.dir": "hdfs://laoban-test/user/hive/warehouse",
        "hive.spark.client.connect.timeout": 5000,
        "spark.jars.packages": "mysql:mysql-connector-java:8.0.26",
        "spark.sql.debug.maxToStringFields": 100,
    }

    @staticmethod
    def create_spark(app_name="test", master="local[*]", extra_conf=None):
        """Build (or fetch the already-running) Hive-enabled SparkSession.

        Backward compatible with the original zero-argument call: the
        defaults reproduce the original hard-coded ``appName('test')`` /
        ``master('local[*]')`` setup.

        Args:
            app_name: Spark application name.
            master: Spark master URL.
            extra_conf: Optional mapping of config key/value pairs that
                override the class defaults above.

        Returns:
            The SparkSession produced by ``builder.getOrCreate()``.
        """
        conf = dict(HSpark._DEFAULT_CONF)
        if extra_conf:
            conf.update(extra_conf)

        builder = (
            SparkSession.builder
            .appName(app_name)
            .master(master)
            .enableHiveSupport()
        )
        for key, value in conf.items():
            # Reassign to honor the fluent Builder contract rather than
            # relying on PySpark's in-place mutation of the builder.
            builder = builder.config(key, value)
        return builder.getOrCreate()


# Module-level singleton: the session is created eagerly at import time,
# so importing this module triggers SparkSession construction (and the
# Hive metastore connection configured in HSpark) as a side effect.
spark = HSpark.create_spark()
