package ds_industry_2025.industry.gy_05.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

/*
    3、编写Scala代码，使用Spark根据dwd层的fact_change_record表和dim_machine表统计，计算每个车间设备的月平均运行时长与所有设
    备的月平均运行时长对比结果（即设备状态为“运行”，结果值为：高/低/相同），月份取值使用状态开始时间的月份，若某设备的运行状态当前
    未结束（即change_end_time值为空）则该状态不参与计算，计算结果存入MySQL数据库shtd_industry的machine_running_compare表
    中（表结构如下），然后在Linux的MySQL命令行中根据车间号降序排序，查询出前2条，将SQL语句复制粘贴至客户端桌面【Release\任务B提
    交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下;
 */
object t3 {
  /**
   * Task 3: for each workshop (车间), compare its monthly average running
   * duration against the overall monthly average across all machines, then
   * persist the comparison to MySQL `shtd_industry.machine_running_compare`.
   *
   * Rules from the task spec:
   *  - only records whose state is "运行" (running) count;
   *  - a state still open (null changeendtime) is excluded;
   *  - the month is taken from the state's start time.
   */
  def main(args: Array[String]): Unit = {
    // Hive support exposes the dwd.* tables; Kryo and the Hudi extension
    // match the configuration used by the rest of this project.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t3")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Keep only finished "运行" state changes; open-ended states
    // (changeendtime is null) must not participate in the averages.
    val finishedRunning = spark.table("dwd.fact_change_record")
      .filter(col("changerecordstate") === "运行")
      .filter(col("changeendtime").isNotNull)
    finishedRunning.createOrReplaceTempView("change_record")

    spark.table("dwd.dim_machine").createOrReplaceTempView("dim_machine")

    // Inner query r1: per record, the workshop id, a "yyyy-M" month key from
    // the start time, and the run duration in seconds.
    // r2: two window averages over r1 — per (workshop, month) and global.
    // Outer select classifies each workshop/month as 高 / 低 / 相同.
    val compareSql =
      """
        |select
        |start_month,factory_id as machine_factory,
        |case
        |when factory_avg > company_avg then '高'
        |when factory_avg < company_avg then '低'
        |else '相同'
        |end as comparison,
        |factory_avg,
        |company_avg
        |from(
        |select distinct
        |factory_id,start_month,
        |avg(run_time) over() as company_avg,
        |avg(run_time) over(partition by factory_id,start_month) as factory_avg
        |from(
        |select
        |d.machinefactory as factory_id,
        |concat(
        |cast(year(changestarttime) as String),
        |"-",
        |cast(month(changestarttime) as String)
        |) as start_month,
        |(unix_timestamp(changeendtime) - unix_timestamp(changestarttime)) as run_time
        |from change_record as c
        |join dim_machine as d
        |on d.basemachineid=c.changemachineid
        |) as r1
        |) as r2
        |""".stripMargin

    val comparison = spark.sql(compareSql)

    // JDBC connection settings for the target MySQL instance.
    val jdbcProps = new Properties()
    jdbcProps.setProperty("user", "root")
    jdbcProps.setProperty("password", "123456")
    jdbcProps.setProperty("driver", "com.mysql.jdbc.Driver")

    // Overwrite the result table on each run.
    comparison.write
      .mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "machine_running_compare", jdbcProps)

    spark.close()
  }

}
