package DianShang_2024.ds_06.clean

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, date_format, date_sub, lit}

import java.text.SimpleDateFormat
import java.util.Date

object dwd_ds_hudi_table_toCreate_data {
  def main(args: Array[String]): Unit = {
    // Build a SparkSession wired up for Hudi (Kryo serializer + Hudi SQL
    // extension) with Hive support enabled.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("准备数据")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    /* Data-preparation rationale:
       1. The later cleaning step deduplicates rows sharing an id by the newest
          create_time/operate_time. To make that effect clearly visible when
          practicing, the data prepared here shifts those timestamp columns back
          by two days. This also means columns such as birthday (which need a
          trailing " 00:00:00") can be left untouched here — they are adjusted
          when the exercise itself is done, since all rows get extracted anyway.
       2. Only user_info and order_info carry both create_time and operate_time;
          every other table has create_time only. */

    // Current wall-clock time, formatted for the dwd_* audit columns.
    def now(): String = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

    // Copy one ODS Hudi table into its DWD counterpart, shifting each column in
    // `timeCols` back two days and stamping the standard audit columns.
    //   odsTableName  source table under ods_ds_hudi.db
    //   dwdTableName  target table under dwd_ds_hudi.db (also used as the Hudi table name)
    //   preCol        Hudi precombine field
    //   priCol        Hudi record-key field
    //   timeCols      timestamp columns to shift back two days
    def writeHudi(odsTableName: String, dwdTableName: String,
                  preCol: String, priCol: String, timeCols: Seq[String]): Unit = {
      val odsBasePath = s"hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi.db/${odsTableName}"
      val dwdBasePath = s"hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/${dwdTableName}"

      // date_sub(col, 2) subtracts two days but returns a DATE (dropping any
      // time-of-day part); Hudi does not support that type here, so cast to String.
      val shifted = timeCols.foldLeft(
        spark.read.format("hudi").load(odsBasePath)
          .where(col("etl_date") === "20231027")
          .drop("etl_date")
      ) { (df, c) => df.withColumn(c, date_sub(col(c), 2).cast("String")) }

      shifted
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", lit(now()))
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", lit(now()))
        .withColumn("etl_date", lit("20240101"))
        .write.mode("append").format("hudi")
        .options(getQuickstartWriteConfigs)
        .option(PRECOMBINE_FIELD.key(), preCol)
        .option(RECORDKEY_FIELD.key(), priCol)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        // FIX: the correct config key is "hoodie.table.name"; the original
        // "hoodie.table.table" is not a valid Hudi option and was silently ignored.
        .option("hoodie.table.name", dwdTableName)
        .save(dwdBasePath)
    }

    // Tables carrying both create_time and operate_time.
    def write_hudi01(odsTableName: String, dwdTableName: String, preCol: String, priCol: String): Unit =
      writeHudi(odsTableName, dwdTableName, preCol, priCol, Seq("create_time", "operate_time"))

    // Tables carrying create_time only.
    def write_hudi02(odsTableName: String, dwdTableName: String, preCol: String, priCol: String): Unit =
      writeHudi(odsTableName, dwdTableName, preCol, priCol, Seq("create_time"))

    // Load the prepared data into the DWD layer.
    write_hudi01("user_info", "dim_user_info", "operate_time", "id")
    write_hudi01("order_info", "fact_order_info", "operate_time", "id")

    write_hudi02("sku_info", "dim_sku_info", "dwd_modify_time", "id")
    write_hudi02("base_province", "dim_province", "dwd_modify_time", "id")
    write_hudi02("base_region", "dim_region", "dwd_modify_time", "id")
    write_hudi02("order_detail", "fact_order_detail", "dwd_modify_time", "id")

    // Tear down the Spark environment.
    spark.close()
  }

}
