package DianShang_2024.ds_07.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, current_timestamp, date_format, lit, to_timestamp}

import java.text.SimpleDateFormat
import java.util.Calendar

object clean06 {

  /**
   * Data-cleaning task 6: copy the latest partition of `ods07.order_detail`
   * into `dwd07.fact_order_detail`, adding the standard dwd audit columns
   * and re-partitioning by yesterday's date (`etl_date = yyyyMMdd`).
   *
   * Side effects: appends to the Hive table `dwd07.fact_order_detail` and
   * prints a completion marker.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗第六题")
      // Dynamic-partition writes without a static partition spec.
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
//      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
//      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Job start time (second precision) for dwd_insert_time, and
    // yesterday's date, which becomes the target partition value.
    val day = Calendar.getInstance()
    val currentTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
    day.add(Calendar.DATE, -1)
    val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)

    // Resolve the newest ODS partition up front. The original code embedded a
    // scalar subquery in a Dataset.where(...) string; the Dataset expression
    // parser does not support subqueries (they are SQL-only), so a plain SQL
    // aggregation is used instead. This also removes the temp-view that
    // existed solely to feed that subquery.
    val maxEtlDate = spark
      .sql("select max(etl_date) from ods07.order_detail")
      .first()
      .getString(0)

    spark.table("ods07.order_detail")
      .where(col("etl_date") === maxEtlDate)
      .drop("etl_date")
      // NOTE(review): create_time is reformatted to "yyyyMMdd" and then fed to
      // to_timestamp WITHOUT an explicit pattern; on many Spark versions
      // to_timestamp cannot parse "20240101" by default and yields null.
      // Confirm the source column's format and, if needed, pass the pattern:
      // to_timestamp(..., "yyyyMMdd").
      .withColumn("create_time", to_timestamp(date_format(col("create_time"), "yyyyMMdd")))
      // Standard dwd audit columns.
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn("dwd_insert_time", to_timestamp(lit(currentTime)))
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn(
        "dwd_modify_time",
        // Round-tripping through date_format truncates to whole seconds.
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"))
      )
      .withColumn("etl_date", lit(yesterday))
      .write.mode("append")
      .format("hive")
      .partitionBy("etl_date")
      // NOTE(review): append + saveAsTable assumes the target table either does
      // not exist yet or was created by Spark with a matching schema; for a
      // pre-existing Hive table, insertInto is usually the safer call — verify.
      .saveAsTable("dwd07.fact_order_detail")

    println("完成")

    spark.close()
  }

}
