package work2

import org.apache.spark.sql.SparkSession
import java.sql.{Connection, DriverManager}

object AdsLogisticsMonthAmount {
  /**
   * Aggregates monthly sales amounts from the Hive table `metrics.dwd_logistics_data`
   * and publishes the result to the MySQL table `ads_logistics_month_amount`.
   *
   * Side effects: creates the MySQL target table if absent, truncates and reloads it,
   * and prints the aggregate to stdout via `show()`.
   */
  def main(args: Array[String]): Unit = {
    // Point Hadoop at a local Windows installation so Spark's native IO works.
    val hadoopHome = "C:\\hadoop\\hadoop-3.2.2"
    System.setProperty("hadoop.home.dir", hadoopHome)
    System.load(hadoopHome + "\\bin\\hadoop.dll")

    // Build a local SparkSession wired to the remote Hive metastore/warehouse.
    val spark: SparkSession = SparkSession
      .builder
      .appName("AdsLogisticsMonthAmount")
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "hdfs://192.168.17.150:9000/user/hive/warehouse")
      .config("hive.metastore.uris", "thrift://192.168.17.150:9083")
      .config("spark.sql.hive.metastore.jars", "file:///C:/Users/LZR/Desktop/shi_jian/apache-hive-3.1.3-bin/lib/*")
      .config("spark.sql.hive.metastore.version", "3.1.3")
      .config("spark.sql.hive.execution.version", "3.1.3")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session is always stopped, even if any step below throws.
    try {
      spark.sql("USE metrics")

      // Sum sales per calendar month (yyyy-MM key) from the DWD layer.
      val monthAmountDF = spark.sql(
        """
          |SELECT
          |  DATE_FORMAT(sales_time, 'yyyy-MM') AS month_key,
          |  SUM(sales_amount) AS sales_amount
          |FROM dwd_logistics_data
          |GROUP BY DATE_FORMAT(sales_time, 'yyyy-MM')
          """.stripMargin)

      monthAmountDF.show()

      // MySQL connection info.
      // NOTE(review): credentials are hard-coded; move to config/env for production use.
      val url = "jdbc:mysql://localhost:3306/metrics?useSSL=false"
      val user = "root"
      val password = "abc123"

      // Pre-create the target table with an explicit schema (DECIMAL precision,
      // InnoDB, utf8mb4) so Spark does not have to infer one.
      val createTableSQL = """
        CREATE TABLE IF NOT EXISTS ads_logistics_month_amount (
          month_key VARCHAR(255),
          sales_amount DECIMAL(10,2)
        ) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci
      """

      // Close JDBC resources on all paths, not just the happy one.
      val connection: Connection = DriverManager.getConnection(url, user, password)
      try {
        val statement = connection.createStatement()
        try {
          statement.executeUpdate(createTableSQL)
        } finally {
          statement.close()
        }
      } finally {
        connection.close()
      }

      // Reload the ADS table. `truncate=true` makes overwrite issue TRUNCATE TABLE
      // instead of DROP+CREATE, preserving the schema/engine/collation defined above
      // (a plain overwrite would recreate the table with a Spark-inferred schema).
      monthAmountDF.write
        .format("jdbc")
        .option("url", url)
        .option("dbtable", "ads_logistics_month_amount")
        .option("user", user)
        .option("password", password)
        .option("truncate", "true")
        .mode("overwrite")
        .save()
    } finally {
      spark.stop()
    }
  }
}