package industry_2024.industry_04.indicator

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object indicator02 {
  /*
   * Task 2: Using Spark, compute — per month (month of change_start_time), per device,
   * and per state — the total duration of each state from the dwd_ds_hudi layer's
   * fact_change_record table. Records whose state has not yet ended
   * (change_end_time is null) are excluded from the calculation. The result is
   * appended to the machine_state_time table of the ClickHouse database
   * shtd_industry. (Verification: in the ClickHouse CLI, query the top 10 rows
   * ordered by device id and state duration, both descending, and paste the SQL
   * and screenshot into Release\任务B提交结果.docx under the task number.)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第二题")
      // FIX: the original key was misspelled as "hive.exec.dynamic.exec.partition.mode",
      // which Hive silently ignores; the real property is "hive.exec.dynamic.partition.mode".
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    val hdfsPath =
      "hdfs://192.168.40.110:9000/user/hive/warehouse/hudi_gy_dwd04.db/fact_change_record"

    // Load the Hudi table, drop still-open state records (change_end_time null)
    // and exact duplicate rows, then expose it to Spark SQL as "change".
    spark.read.format("hudi").load(hdfsPath)
      .filter(col("changeendtime").isNotNull)
      .dropDuplicates()
      .createOrReplaceTempView("change")

    // Aggregate total duration (seconds) per (year, month, device, state).
    // Equivalent to the original windowed sum + SELECT DISTINCT, but a single
    // GROUP BY pass is clearer and avoids the extra dedup shuffle.
    val result = spark.sql(
      """
        |select
        |  changemachineid   as machine_id,
        |  changerecordstate as change_record_state,
        |  sum(unix_timestamp(changeendtime) - unix_timestamp(changestarttime)) as duration_time,
        |  year(changestarttime)  as year,
        |  month(changestarttime) as month
        |from change
        |group by
        |  changemachineid,
        |  changerecordstate,
        |  year(changestarttime),
        |  month(changestarttime)
        |""".stripMargin)

    // Append the aggregate into ClickHouse over JDBC.
    result.write.mode("append")
      .format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/shtd_industry")
      .option("user", "default")
      .option("password", "")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .option("dbtable", "machine_state_time04")
      .save()

    spark.close()
  }
}
