package industry_2024.industry_09.indicator

import org.apache.spark.sql.SparkSession

import java.util.Properties

object indicator04 {

  /**
   * Indicator 4: for every base, compare its monthly average humidity against
   * the factory-wide monthly average, and persist the comparison to MySQL.
   *
   * Pipeline:
   *   1. Read `dwd09.fact_environment_data` from Hive into a temp view.
   *   2. Use window functions to compute the per-(base, year, month) average
   *      humidity and the per-(year, month) factory-wide average.
   *   3. Label each base "高" (higher), "低" (lower) or "相同" (equal) relative
   *      to the factory average; `distinct` collapses the row-level window
   *      output to one row per (base, year, month).
   *   4. Overwrite the MySQL table `machine_humidityAVG_compare09`.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      // Allow dynamic-partition inserts without a static partition column.
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
//      .config("spark.serializer","org.apache.spark.serializer")
//      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the session is released even when the job fails part-way
    // (the original only closed the session on the success path).
    try {
      spark.table("dwd09.fact_environment_data")
        .createOrReplaceTempView("data")

      // `humidity` is stored as a string in the fact table, hence the cast
      // before averaging. Window partitions: inner avg per base+month,
      // outer avg per month across all bases.
      val result = spark.sql(
        """
          |select distinct
          |base_id,machine_avg,factory_avg,
          |case
          |when machine_avg > factory_avg then "高"
          |when machine_avg < factory_avg then "低"
          |else "相同"
          |end
          |as comparison,
          |year as env_date_year,
          |month as env_date_month
          |from(
          |select
          |baseid as base_id,
          |avg(cast(humidity as int)) over(partition by baseid,Year(inputtime),Month(inputtime)) as machine_avg,
          |avg(cast(humidity as int)) over(partition by Year(inputtime),Month(inputtime)) as factory_avg,
          |Year(inputtime) as year,
          |Month(inputtime) as month
          |from data
          |) as r1
          |""".stripMargin)

      // NOTE(review): credentials and host are hard-coded; move to
      // configuration/environment variables before production use.
      val connect = new Properties()
      connect.setProperty("user", "root")
      connect.setProperty("password", "123456")
      connect.setProperty("driver", "com.mysql.jdbc.Driver")

      // "overwrite" drops and recreates the target table on every run.
      result.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "machine_humidityAVG_compare09", connect)
    } finally {
      spark.close()
    }
  }

}
