package ds_industry_2025.industry.gy_09.T4

import org.apache.spark.sql.SparkSession
import org.dom4j.DocumentHelper

import java.sql.Timestamp
import java.text.SimpleDateFormat
import java.util.Properties

/*
    1、根据dwd库中fact_machine_data表（或MySQL的shtd_industry库中MachineData表），根据以下要求转换：获取最大分区
    （MySQL不用考虑）的数据后，首先解析列machine_record_data（MySQL中为MachineRecordData）的数据（数据格式为xml，
    采用dom4j解析，解析demo在客户端/home/ubuntu/Documents目录下），并获取每条数据的主轴转速，主轴倍率，主轴负载，进给倍率，进
    给速度，PMC程序号，循环时间，运行时间，有效轴数，总加工个数，已使用内存，未使用内存，可用程序量，注册程序量等相关的值（若
    该条数据没有相关值，则按下表设置默认值），同时转换machine_record_state字段的值，若值为报警，则填写1，否则填写0，以下
    为表结构，将数据保存在dwd.fact_machine_learning_data，使用cli按照machine_record_id升序排序，查
    询dwd.fact_machine_learning_data前1条数据，将结果截图粘贴至客户端桌面【Release\任务C提交结果.docx】中对应的任务序号下。
dwd.fact_machine_learning_data表结构：
 */
object t1 {

  /**
   * Entry point. Reads the `MachineData` table from MySQL, parses the XML fragment
   * stored in `MachineRecordData` with dom4j, flattens each row into a `machine`
   * record (metrics absent from the XML keep their 0.0 defaults), converts the
   * record state ("报警"/alarm -> 1.0, anything else -> 0.0), and overwrites the
   * Hive table `dwd.fact_machine_learning_data`.
   */
  def main(args: Array[String]): Unit = {
    val start_time = System.currentTimeMillis()
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t1")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.conf.set("spark.sql.shuffle.partitions", "500")
    import spark.implicits._

    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    val data = spark.read
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", "MachineData", conn)

    // Expected source column order: 0=record id, 1=machine id, 2=state,
    // 3=XML payload, 4=record date (NOTE(review): inferred from the index
    // usage below — confirm against the MachineData schema).
    val result = data.map(
      r => {
        val m = new machine()
        m.machine_record_id = r.getAs[Int](0)
        m.machine_id = r.getAs[Int](1).toDouble
        // "报警" (alarm) maps to 1.0, everything else to 0.0. Comparing
        // literal-first avoids an NPE when the state column is NULL (the
        // payload column is explicitly null-checked below, so NULLs occur).
        m.machine_record_state = if ("报警".equals(r.getAs[String](2))) 1.0 else 0.0
        m.machine_record_date = new Timestamp(
          new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(r(4).toString).getTime
        )
        // The XML column may be NULL in MySQL; only parse when present.
        if (r.getAs[String](3) != null) {
          // Wrap the fragment in a single root element so dom4j accepts it
          // as a well-formed document.
          val str = s"<row>${r.getAs[String](3)}</row>"
          val document = DocumentHelper.parseText(str)
          val root = document.getRootElement
          val et = root.elementIterator()
          while (et.hasNext) {
            val element = et.next()
            val colname = element.attributeValue("ColName")
            val text = element.getTextTrim
            // A metric element may carry the literal string "null" or be empty;
            // in both cases the field keeps its case-class default of 0.0.
            if (!text.equals("null") && text.nonEmpty) {
              colname match {
                case "主轴转速" => m.machine_record_mainshaft_speed = text.toDouble
                case "主轴倍率" => m.machine_record_mainshaft_multiplerate = text.toDouble
                case "主轴负载" => m.machine_record_mainshaft_load = text.toDouble
                // BUG FIX: 进给倍率 (feed override/multiplier) and 进给速度
                // (feed speed) were previously assigned to each other's fields;
                // the mapping is corrected here to match the field names.
                case "进给倍率" => m.machine_record_feed_multiplerate = text.toDouble
                case "进给速度" => m.machine_record_feed_speed = text.toDouble
                case "PMC程序号" => m.machine_record_pmc_code = text.toDouble
                case "循环时间" => m.machine_record_circle_time = text.toDouble
                case "运行时间" => m.machine_record_run_time = text.toDouble
                case "有效轴数" => m.machine_record_effective_shaft = text.toDouble
                case "总加工个数" => m.machine_record_amount_process = text.toDouble
                case "已使用内存" => m.machine_record_use_memory = text.toDouble
                case "未使用内存" => m.machine_record_free_memory = text.toDouble
                case "可用程序量" => m.machine_record_amount_use_code = text.toDouble
                case "注册程序量" => m.machine_record_amount_free_code = text.toDouble
                // Unknown ColName values are deliberately ignored.
                case _ => ""
              }
            }
          }
        }
        m
      }
    )

    result.show

    result.write.format("hive").mode("overwrite")
      .saveAsTable("dwd.fact_machine_learning_data")

    val end_time = System.currentTimeMillis()
    println(end_time - start_time)

    spark.close()
  }

  /**
   * Flat row for dwd.fact_machine_learning_data. Field order defines the Hive
   * column order, so it must not be rearranged. All metric columns default to
   * 0.0, the required value when the XML payload omits the metric. Fields are
   * `var` because the mapper above fills them in imperatively.
   */
  case class machine(
                      var machine_record_id: Int = 0,
                      var machine_id: Double = 0.0,
                      var machine_record_state: Double = 0.0,
                      var machine_record_mainshaft_speed: Double = 0.0,
                      var machine_record_mainshaft_multiplerate: Double = 0.0,
                      var machine_record_mainshaft_load: Double = 0.0,
                      var machine_record_feed_speed: Double = 0.0,
                      var machine_record_feed_multiplerate: Double = 0.0,
                      var machine_record_pmc_code: Double = 0.0,
                      var machine_record_circle_time: Double = 0.0,
                      var machine_record_run_time: Double = 0.0,
                      var machine_record_effective_shaft: Double = 0.0,
                      var machine_record_amount_process: Double = 0.0,
                      var machine_record_use_memory: Double = 0.0,
                      var machine_record_free_memory: Double = 0.0,
                      var machine_record_amount_use_code: Double = 0.0,
                      var machine_record_amount_free_code: Double = 0.0,
                      var machine_record_date: Timestamp = null,
                      var dwd_insert_user: String = "user1",
                      var dwd_insert_time: Timestamp = new Timestamp(System.currentTimeMillis()),
                      var dwd_modify_user: String = "user1",
                      var dwd_modify_time: Timestamp = new Timestamp(System.currentTimeMillis())
                    )
}
