package ds_industry_2025.industry.gy_05.T3


import org.apache.spark.sql.functions._
import org.apache.spark.sql.{SparkSession, functions}

import java.util.Properties
/*
      4、编写Scala代码，使用Spark根据dwd层的fact_change_record表展示每一个设备最近第二次的状态（倒数第二次），时间字段选
      用change_start_time，如果设备仅有一种状态，返回该状态（一个设备不会同时拥有两种状态），存入MySQL数据库shtd_industry的
      recent_state表中（表结构如下），然后在Linux的MySQL命令行中根据设备id降序排序，查询出前5条，将SQL语句复制粘贴至客户端
      桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中
      对应的任务序号下；
 */
object t6 {
  /**
   * Task 4: for every device in dwd fact_change_record, find the second most
   * recent state (ordered by change_start_time descending). If a device has
   * only one state, keep that state. Write the result to MySQL table
   * shtd_industry.recent_state.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t6")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Hudi-backed dwd table on HDFS.
    val hdfs = "hdfs://192.168.40.110:9000/user/hive/warehouse/hudi_gy_dwd.db/fact_change_record"

    spark.read.format("hudi").load(hdfs)
      .createOrReplaceTempView("data")

    // Debug aid: record count per device, useful to sanity-check the
    // single-state vs. multi-state split below.
    spark.sql("select changemachineid,count(*) as number from data group by changemachineid").show

    // Rank each device's records by change_start_time descending and keep
    // only the two most recent rows per device (row = 1 latest, row = 2
    // second latest).
    val r1 = spark.sql(
      """
        |select
        |*
        |from(
        |select distinct
        |changemachineid as machine_id,
        |changerecordstate as state,
        |changestarttime as start_time,
        |changeendtime as endtime,
        |row_number() over(partition by changemachineid order by changestarttime desc) as row
        |from data
        |) as r1
        |where row < 3
        |""".stripMargin)

    // Devices that have only a single record (hence a single state): their
    // one row is the answer. (`ma_id` alias avoids an ambiguous-column join.)
    val t1_id = r1.groupBy("machine_id")
      .agg(count("*").as("number"))  // count from functions._ (consistent with col)
      .filter(col("number") === 1)
      .select("machine_id")
      .withColumnRenamed("machine_id", "ma_id")
      .distinct()

    val t1 = r1.join(t1_id, t1_id("ma_id") === r1("machine_id"), "inner")
      .drop("ma_id")
      .drop("row")

    // Devices with two or more records: take the second most recent (row = 2).
    val t2 = r1.filter(col("row") === 2)
      .drop("row")

    // Final result: `union` (by-position) replaces the deprecated `unionAll`;
    // both sides share the same column order from r1, so positional union is safe.
    val result = t2.union(t1)

    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    result.show

    // MySQL rejects the timestamp default it infers for `endtime`
    // ("Invalid default value for 'endtime'"), so cast it to a string
    // before the JDBC write.
    result.withColumn("endtime", col("endtime").cast("String"))
      .write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "recent_state", conn)

    spark.close()
  }

}
