package ods_industry_2024.ods_02.clean

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{col, current_timestamp, date_format, desc, lit, max, min, row_number, to_timestamp, when}

import java.text.SimpleDateFormat
import java.util.Calendar
import scala.math.Ordering.Implicits.infixOrderingOps

object clean_count {

  // Base HDFS path of the dwd Hudi database; every fact table is saved beneath it.
  private val DwdBasePath = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi_02.db"

  /**
   * Reads the latest `etl_date` partition of an ODS table, applies a
   * table-specific transformation, stamps the standard dwd audit columns,
   * and appends the result to the corresponding Hudi fact table.
   *
   * @param spark           active SparkSession
   * @param odsTable        source table name inside `ods_ds_hudi_02`
   * @param tempView        temp-view name used by the `max(etl_date)` scalar subquery
   * @param dwdTable        target Hudi table name (also used as `hoodie.table.name`)
   * @param preCombineField Hudi pre-combine (de-duplication ordering) field
   * @param currentTime     job start time, formatted "yyyy-MM-dd HH:mm:ss"
   * @param etlDate         partition value to write, formatted "yyyyMMdd"
   * @param transform       table-specific column transformations applied before
   *                        the audit columns are added
   */
  private def cleanFactTable(
      spark: SparkSession,
      odsTable: String,
      tempView: String,
      dwdTable: String,
      preCombineField: String,
      currentTime: String,
      etlDate: String
  )(transform: DataFrame => DataFrame): Unit = {
    // Register the view so the scalar subquery below can locate the latest partition.
    // NOTE(review): in the competition version this filter was the literal `yesterday`
    // value instead of max(etl_date) — see the removed method01/method02 comments in VCS.
    spark.table(s"ods_ds_hudi_02.${odsTable}").createOrReplaceTempView(tempView)

    val latestPartition = spark.table(s"ods_ds_hudi_02.${odsTable}")
      .where(s"etl_date=(select max(etl_date) from ${tempView})")
      .drop("etl_date")

    transform(latestPartition)
      // Standard dwd audit columns: who/when the row was inserted and last modified.
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn(
        "dwd_insert_time",
        to_timestamp(lit(currentTime), "yyyy-MM-dd HH:mm:ss")
      )
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn(
        "dwd_modify_time",
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")
      )
      // Re-partition the cleaned data under yesterday's date.
      .withColumn("etl_date", lit(etlDate))
      .write.mode("append")
      .format("hudi")
      .options(getQuickstartWriteConfigs)
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PRECOMBINE_FIELD.key(), preCombineField)
      .option(PARTITIONPATH_FIELD.key(), "etl_date")
      .option("hoodie.table.name", dwdTable)
      .save(s"${DwdBasePath}/${dwdTable}")

    println(s"${dwdTable}表格完成")
  }

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // try/finally guarantees the session is released even if a stage fails
    // (previously an exception anywhere would leak the SparkSession).
    try {
      // Job timestamps: "now" for the audit columns, yesterday (yyyyMMdd) as the etl_date partition.
      val day: Calendar = Calendar.getInstance()
      val currentTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
      day.add(Calendar.DATE, -1)
      val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)

      // NOTE(review): a large block of commented-out dim-table cleaning code
      // (method01/method02 for user_info, sku_info, base_province, base_region)
      // was removed as dead code — recover it from version control if needed.

      // Question 5: clean ods order_info -> dwd fact_order_info.
      cleanFactTable(spark, "order_info", "temp01", "fact_order_info",
        "operate_time", currentTime, yesterday) { df =>
        df
          .withColumn(
            "create_time",
            date_format(col("create_time"), "yyyyMMdd")
          )
          // Fill missing operate_time from create_time.
          // NOTE(review): create_time has already been reformatted to "yyyyMMdd"
          // above, so filled operate_time values are yyyyMMdd strings while
          // non-null ones keep their original format — and this column is the
          // Hudi pre-combine field. Confirm this ordering matches the task spec;
          // if not, the null-fill should happen BEFORE the create_time reformat.
          .withColumn(
            "operate_time",
            when(col("operate_time").isNull, col("create_time")).otherwise(col("operate_time"))
          )
      }

      // Question 6: clean ods order_detail -> dwd fact_order_detail.
      cleanFactTable(spark, "order_detail", "temp06", "fact_order_detail",
        "dwd_modify_time", currentTime, yesterday) { df =>
        df.withColumn(
          "create_time",
          date_format(col("create_time"), "yyyyMMdd")
        )
      }
    } finally {
      spark.close()
    }
  }

}
