package industry_2024.industry_05.indicator

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

import java.util.Properties

object indicator04 {
  def main(args: Array[String]): Unit = {
    /*
        Task: using Spark, read the dwd-layer table fact_change_record and, for
        each machine, output its second-most-recent state, ordering by
        change_start_time descending. If a machine has only ONE state record,
        output that single state instead (a machine never holds two states at
        the same time). Write the result into the recent_state table of the
        MySQL database shtd_industry.
     */

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    // Expose the Hive table under a short alias for the SQL below.
    spark.table("dwd05.fact_change_record")
      .createOrReplaceTempView("fact_change_record")

    // rn  = recency rank per machine (1 = most recent, by change_start_time desc)
    // cnt = number of state records per machine
    // Keep the second-most-recent row, OR the only row when a machine has
    // exactly one record. (The previous version kept only rn = 2 and therefore
    // silently dropped every machine with a single state — the bug noted in
    // the original TODO.)
    val result = spark.sql(
      """
        |select
        |  r1.machine_id,
        |  r1.record_state,
        |  r1.change_start_time,
        |  r1.change_end_time
        |from (
        |  select
        |    f.changemachineid as machine_id,
        |    f.changerecordstate as record_state,
        |    row_number() over(partition by f.changemachineid order by f.changestarttime desc) as rn,
        |    count(*) over(partition by f.changemachineid) as cnt,
        |    f.changestarttime as change_start_time,
        |    f.changeendtime as change_end_time
        |  from fact_change_record as f
        |) as r1
        |where r1.rn = 2 or (r1.cnt = 1 and r1.rn = 1)
        |""".stripMargin)

    // JDBC connection properties for the target MySQL instance.
    val mysqlConnect = new Properties()
    mysqlConnect.setProperty("user", "root")
    mysqlConnect.setProperty("password", "123456")
    mysqlConnect.setProperty("driver", "com.mysql.jdbc.Driver")
    result.show()

    // Timestamp columns are cast to string before the JDBC write so MySQL
    // receives plain text values matching the expected recent_state schema.
    result
      .withColumn("change_start_time", col("change_start_time").cast("string"))
      .withColumn("change_end_time", col("change_end_time").cast("string"))
      .write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "recent_state05", mysqlConnect)

    spark.close()
  }
}
