package industry_2024.industry_05.indicator

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

object indicator03 {

  /**
   * Task 3: From the dwd-layer fact_change_record and dim_machine tables, compute each
   * workshop's (machine_factory) monthly average running duration for machines in the
   * "运行" (running) state, compare it to the company-wide monthly average, label the
   * result 高 (higher) / 低 (lower) / 相同 (equal), and write the comparison into the
   * MySQL table shtd_industry.machine_running_compare05.
   *
   * Rules:
   *  - the month is taken from the state's start time (changestarttime);
   *  - running states that have not finished (changeendtime IS NULL) are excluded.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第三题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    // Only finished "运行" (running) state records participate in the calculation.
    spark.table("dwd05.fact_change_record")
      .where(col("changerecordstate") === "运行")
      .filter(col("changeendtime").isNotNull)
      .createOrReplaceTempView("fact_change_record")

    spark.table("dwd05.dim_machine")
      .createOrReplaceTempView("dim_machine")

    val result = spark.sql(
      """
        |with
        |factory_monthly as (
        |  -- Average running seconds per (month, workshop).
        |  select
        |    date_format(f.changestarttime, 'yyyy-MM')                                as start_month,
        |    d.machinefactory                                                         as machine_factory,
        |    avg(unix_timestamp(f.changeendtime) - unix_timestamp(f.changestarttime)) as factory_avg
        |  from fact_change_record f
        |  -- BUG FIX: the original query joined dim_machine with NO join condition,
        |  -- producing a cartesian product that duplicated every fact row per machine
        |  -- and made the per-factory averages wrong. Key the join on the machine id.
        |  -- NOTE(review): key names assumed from the standard shtd_industry schema
        |  -- (ChangeMachineID <-> BaseMachineID); confirm against the dwd table DDL.
        |  join dim_machine d
        |    on f.changemachineid = d.basemachineid
        |  group by date_format(f.changestarttime, 'yyyy-MM'), d.machinefactory
        |),
        |company_monthly as (
        |  -- Company-wide average running seconds per month (all machines together).
        |  select
        |    date_format(f.changestarttime, 'yyyy-MM')                                as start_month,
        |    avg(unix_timestamp(f.changeendtime) - unix_timestamp(f.changestarttime)) as company_avg
        |  from fact_change_record f
        |  group by date_format(f.changestarttime, 'yyyy-MM')
        |)
        |select
        |  fm.start_month,
        |  fm.machine_factory,
        |  case
        |    when fm.factory_avg > cm.company_avg then '高'
        |    when fm.factory_avg < cm.company_avg then '低'
        |    else '相同'
        |  end as comparison,
        |  fm.factory_avg,
        |  cm.company_avg
        |from factory_monthly fm
        |join company_monthly cm
        |  on fm.start_month = cm.start_month
        |""".stripMargin)
    // Aliases renamed from `month`/`all`: ALL is a reserved word in ANSI SQL and
    // breaks under Spark's ANSI mode; neutral aliases are safe everywhere.

    // JDBC connection properties for the target MySQL instance.
    val mysqlProps = new Properties()
    mysqlProps.setProperty("user", "root")
    mysqlProps.setProperty("password", "123456")
    mysqlProps.setProperty("driver", "com.mysql.jdbc.Driver")

    // Overwrite so the job is idempotent across re-runs.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "machine_running_compare05", mysqlProps)

    spark.close()
  }

}
