package com.chinasoft.scala

import org.apache.spark.sql.SparkSession
import java.util.Properties

/**
 * Central configuration for the pollution-analysis Spark job:
 * SparkSession construction (with Hive support) and MySQL JDBC settings.
 *
 * NOTE(review): the enclosing package is `com.chinasoft.scala` — a package
 * segment named `scala` can shadow the root `scala` package in relative
 * imports; consider renaming it in a future refactor (not changed here, as
 * it would break callers).
 */
object SparkConfig {

  /**
   * Builds (or reuses) a SparkSession with Hive support enabled.
   *
   * @param appName Spark application name shown in the UI (default "PollutionAnalysis").
   * @param master  Spark master URL; defaults to "local[*]" so existing callers
   *                are unaffected, but can be overridden for cluster deployment.
   * @return a SparkSession connected to the configured Hive metastore.
   */
  def getSparkSession(appName: String = "PollutionAnalysis",
                      master: String = "local[*]"): SparkSession = {
    // Impersonate the "atguigu" Hadoop user to avoid HDFS permission errors.
    // Only set it if the caller/environment has not already chosen a user,
    // so an externally supplied HADOOP_USER_NAME is respected.
    if (System.getProperty("HADOOP_USER_NAME") == null) {
      System.setProperty("HADOOP_USER_NAME", "atguigu")
    }

    SparkSession
      .builder()
      .appName(appName)
      .enableHiveSupport()
      .master(master)
      // Hive metastore location (Thrift service).
      .config("hive.metastore.uris", "thrift://192.168.16.100:9083")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      // Redundant with enableHiveSupport(), kept for explicitness.
      .config("spark.sql.catalogImplementation", "hive")
      // Dynamic-partition settings so INSERTs can create partitions on the fly.
      .config("hive.exec.dynamic.partition", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("hive.exec.max.dynamic.partitions", "1000")
      .config("hive.exec.max.dynamic.partitions.pernode", "100")
      // Adaptive Query Execution: runtime re-optimization and partition coalescing.
      .config("spark.sql.adaptive.enabled", "true")
      .config("spark.sql.adaptive.coalescePartitions.enabled", "true")
      .getOrCreate()
  }

  /**
   * JDBC connection properties for the MySQL sink, tuned for batched writes.
   *
   * NOTE(review): credentials are hard-coded in plaintext — move them to
   * environment variables or a secrets store before production use.
   * NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class,
   * while getMySQLUrl() uses the 8.x-style `serverTimezone` parameter — confirm
   * which driver jar is actually on the classpath (8.x uses
   * "com.mysql.cj.jdbc.Driver"). Left unchanged to avoid breaking the
   * deployed environment.
   *
   * @return a fresh Properties instance (callers may mutate it safely).
   */
  def getMySQLProperties(): Properties = {
    val prop = new Properties()
    prop.put("user", "root")
    prop.put("password", "password")
    prop.put("driver", "com.mysql.jdbc.Driver")
    // Batch up to 10k rows per round-trip; rewriteBatchedStatements lets the
    // driver collapse them into multi-row INSERTs for a large speedup.
    prop.put("batchsize", "10000")
    prop.put("rewriteBatchedStatements", "true")
    // Character-encoding settings for compatibility with older driver versions.
    prop.put("useUnicode", "true")
    prop.put("characterEncoding", "utf8")
    prop.put("connectionCollation", "utf8_general_ci")
    prop
  }

  /**
   * JDBC URL for the `airTest` database.
   * SSL is disabled and the session timezone pinned to UTC so timestamp
   * round-trips are deterministic regardless of server settings.
   */
  def getMySQLUrl(): String = {
    "jdbc:mysql://192.168.16.100:3306/airTest?useSSL=false&serverTimezone=UTC&characterEncoding=utf8&useUnicode=true"
  }

  /**
   * The fixed set of key cities analyzed by this job
   * (Beijing, Shanghai, Guangzhou, Shenzhen, Chengdu, Wuhan, Xi'an,
   * Shenyang, Nanjing, Hangzhou).
   */
  def getKeyCities(): List[String] = {
    List("北京", "上海", "广州", "深圳", "成都", "武汉", "西安", "沈阳", "南京", "杭州")
  }

}
