package ds_industry_2025.industry.gy_09.T3

import org.apache.spark.sql.SparkSession

import java.util.Properties

/*
    4、编写Hive SQL代码，根据dwd层的fact_environment_data表，统计检测设备（BaseID）每月的平均湿度（Humidity），然后将每个设
    备的每月平均湿度与厂内所有检测设备每月检测结果的平均湿度做比较（结果值为：高/低/相同）存入MySQL数据库shtd_industry
    的machine_humidityAVG_compare表中（表结构如下），然后在Linux的MySQL命令行中根据检测设备ID降序排序，查询出前5条，将SQL语
    句复制粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结
    果.docx】中对应的任务序号下；
 */
object t4 {
  /**
   * Task 4 (see header comment): from dwd.fact_environment_data, compute each
   * device's (BaseID) average humidity per month, compare it against the
   * factory-wide average humidity for the SAME month (result: 高/低/相同), and
   * write the rows into MySQL table shtd_industry.machine_humidityAVG_compare.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      .config("hive.exec.dynamic.partition.mode","nonstrict")
      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Expose the dwd fact table under a short view name for the SQL below.
    spark.table("dwd.fact_environment_data")
      .createOrReplaceTempView("data")

    // Inner query r1: project baseid/humidity plus year/month derived from inputtime.
    // r2: window aggregates —
    //   machine_avg: per-device average humidity within each (year, month);
    //   factory_avg: average humidity over ALL devices within each (year, month).
    // BUG FIX: factory_avg previously used `over()` (one global average across
    // every month), but the task requires the factory average for the SAME month,
    // so the window must be partitioned by year, month.
    val result = spark.sql(
      """
        |select
        |baseid,machine_avg,factory_avg,
        |case
        |when machine_avg > factory_avg then "高"
        |when machine_avg < factory_avg then "低"
        |else "相同"
        |end as comparison,
        |year,
        |month
        |from(
        |select distinct
        |baseid,
        |avg(humidity) over(partition by year,month,baseid) as machine_avg,
        |avg(humidity) over(partition by year,month) as factory_avg,
        |year,month
        |from(
        |select
        |baseid,
        |humidity,
        |year(inputtime) as year,
        |month(inputtime) as month
        |from data
        |) as r1
        |) as r2
        |""".stripMargin)

    // JDBC connection properties for the target MySQL instance.
    // NOTE(review): credentials are hard-coded in source; acceptable for a
    // competition/exercise script, but should come from config in real code.
    val conn = new Properties()
    conn.setProperty("user","root")
    conn.setProperty("password","123456")
    conn.setProperty("driver","com.mysql.jdbc.Driver")

    // Overwrite (drop & recreate) the result table on each run.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false","machine_humidityAVG_compare",conn)

    // Verification query to run in the MySQL CLI (per the task statement):
    // todo select * from shtd_industry.machine_humidityAVG_compare order by baseid desc limit 5;

    spark.close()
  }

}
