package com.software.process.now

import com.software.util.DBTools
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions.{mean, round}

object CityMeanByProvince {
  // Silence Spark's verbose internal logging so job output stays readable.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /**
   * Joins per-city AQI mean values with the city→province mapping table,
   * computes the average value per province (rounded to 2 decimal places),
   * prints the result, and writes it back to the `mean_by_province3` table.
   */
  def main(args: Array[String]): Unit = {
    val spark = DBTools.getSession("AirQuailty", "local")

    // Single set of connection settings, shared by both reads and the write.
    // (The original duplicated these inline for the write, with inconsistent
    // database-name casing — airdb vs AirDB — which breaks on Linux MySQL
    // servers where database names are case-sensitive.)
    val url = "jdbc:mysql://localhost:3306/airdb?serverTimezone=GMT%2B8"
    val user = "root"
    val password = "lyf20020511"

    // Reads one JDBC table into a DataFrame using the shared connection settings.
    def readTable(table: String) =
      spark.read
        .format("jdbc")
        .option("url", url)
        .option("dbtable", table)
        .option("user", user)
        .option("password", password)
        .load()

    // Per-city AQI mean values (expects columns: city, value).
    val cityDF = readTable("aqi_mean_value")
    // City → province mapping (expects columns: city, province).
    // NOTE: this is a JDBC table, not a CSV file as the old comment claimed.
    val provinceDF = readTable("city_province")

    // Inner-join on the shared "city" column (Seq form avoids a duplicate column).
    val joinedDF = cityDF.join(provinceDF, Seq("city"))

    // Group by province and average the AQI value, rounded to 2 decimals.
    val resultDF = joinedDF.groupBy("province")
      .agg(round(mean("value"), 2).as("value"))

    // Show the aggregate for operator inspection.
    resultDF.show()

    // Persist the aggregation. The original chained .mode(SaveMode.Overwrite)
    // and then .mode(SaveMode.Append); the last call wins, so every run
    // appended duplicate rows. Overwrite is correct for a fully recomputed
    // aggregate — the table is replaced atomically each run.
    resultDF.write
      .format("jdbc")
      .option("url", url)
      .option("driver", "com.mysql.cj.jdbc.Driver")
      .option("user", user)
      .option("password", password)
      .option("dbtable", "mean_by_province3")
      .mode(SaveMode.Overwrite)
      .save()

    // Release the SparkSession and its cluster resources.
    spark.stop()
  }
}