package industry_2024.industry_05.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_date, to_timestamp}
import shapeless.syntax.typeable.typeableOps

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

object table_preparation {

  /**
   * Prepares the dwd05 layer: drops and recreates the four dwd tables
   * (fact_change_record, dim_machine, fact_produce_record, fact_machine_data),
   * then loads each from its matching ods05 source table, adding the standard
   * audit columns (dwd_insert_user/time, dwd_modify_user/time) and an
   * `etldate` string partition keyed on yesterday's date (yyyyMMdd).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("准备dwd层的表和数据")
      // Rows choose their partition at write time, so dynamic partitioning
      // must be allowed without a static partition column.
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd05")

    /** Drop `table` if it already exists in the current database, then recreate it from `ddl`. */
    def recreate(table: String, ddl: String): Unit = {
      spark.sql(s"drop table if exists $table")
      spark.sql(ddl)
    }

    recreate(
      "fact_change_record",
      """
        |create table if not exists fact_change_record(
        |ChangeID int,
        |ChangeMachineID int,
        |ChangeMachineRecordID int,
        |ChangeRecordState string,
        |ChangeStartTime timestamp,
        |ChangeEndTime timestamp,
        |ChangeRecordData string,
        |ChangeHandleState int,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etldate string)
        |""".stripMargin)

    recreate(
      "dim_machine",
      """
        |create table if not exists dim_machine(
        |BaseMachineID int,
        |MachineFactory int,
        |MachineNo string,
        |MachineName string,
        |MachineIP string,
        |MachinePort int,
        |MachineAddDate timestamp,
        |MachineRemarks string,
        |MachineAddEmpID int,
        |MachineResponsEmpID int,
        |MachineLedgerXml string,
        |ISWS int,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etldate string)
        |""".stripMargin)

    recreate(
      "fact_produce_record",
      """
        |create table if not exists fact_produce_record(
        |ProduceRecordID int,
        |ProduceMachineID int,
        |ProduceCodeNumber string,
        |ProduceStartWaitTime timestamp,
        |ProduceCodeStartTime timestamp,
        |ProduceCodeEndTime timestamp,
        |ProduceCodeCycleTime int,
        |ProduceEndTime timestamp,
        |ProduceTotalOut int,
        |ProduceInspect int,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etldate string)
        |""".stripMargin)

    recreate(
      "fact_machine_data",
      """
        |create table if not exists fact_machine_data(
        |MachineRecordID int,
        |MachineID int,
        |MachineRecordState string,
        |MachineRecordData string,
        |MachineRecordDate timestamp,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etldate string)
        |""".stripMargin)

    spark.sql("show tables").show

    /**
     * Copies `ods05.$odsName` into the dwd table `dwdName`:
     * drops the source's etldate column, stamps the four audit columns
     * (user fixed to "user1", timestamp truncated to whole seconds), and
     * writes into the `etldate` partition for yesterday (yyyyMMdd).
     *
     * @param odsName source table name inside the ods05 database
     * @param dwdName target table name inside the current (dwd05) database
     */
    def insertData(odsName: String, dwdName: String): Unit = {
      val cal = Calendar.getInstance()
      cal.add(Calendar.DATE, -1)
      val yesterday = new SimpleDateFormat("yyyyMMdd").format(cal.getTime)

      // current_timestamp() carries sub-second precision; the round-trip
      // through date_format/to_timestamp truncates it to whole seconds so
      // insert and modify stamps compare cleanly.
      val auditTime =
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")

      spark.table(s"ods05.$odsName")
        .drop("etldate")
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", auditTime)
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", auditTime)
        .withColumn("etldate", lit(yesterday))
        .write.mode("append")
        .format("hive")
        .partitionBy("etldate")
        .saveAsTable(dwdName)
    }

    insertData("changerecord", "fact_change_record")
    insertData("basemachine", "dim_machine")
    insertData("producerecord", "fact_produce_record")
    insertData("machinedata", "fact_machine_data")

    spark.close()
  }

}
