package DianShang_2024.ds_06.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit}
import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.config.HoodieWriteConfig._

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

object table_data_preparation {

  /**
   * One-shot ETL job: reads the `user_info` Hudi table from the ODS layer,
   * stamps audit columns, and materialises it as the DWD-layer dimension
   * table `dim_user_info` (Hudi COW, partitioned by `etl_date`) via CTAS.
   *
   * Side effects: creates/overwrites a temp view `temp01`, creates the Hudi
   * table `dwd_ds_hudi.dim_user_info` on HDFS, and syncs it to Hive (HMS).
   */
  def main(args: Array[String]): Unit = {

    // Partition being processed. Single constant so the read filter and the
    // re-added partition column can never drift apart.
    val etlDate = "20231027"

    // Set up a Spark session with Hudi support enabled.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("创建表")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kryo serializer is required by Hudi's Spark integration.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd_ds_hudi")

    // dim_user_info: ODS source path and DWD target path on HDFS.
    val ods_base_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi.db/user_info"
    val dwd_base_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db/dim_user_info"

    // Single job-level timestamp: computing it once guarantees insert and
    // modify times are identical for every row written by this run.
    val nowTs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

    // Read the selected ODS partition, add the four audit columns, re-add the
    // partition column, and expose the result as a temp view for the CTAS.
    spark.read.format("hudi").load(ods_base_path)
      .where(col("etl_date") === lit(etlDate)) // keep only the target partition
      .drop("etl_date") // dropped and re-added last so it sits after the audit columns
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn("dwd_insert_time", lit(nowTs))
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn("dwd_modify_time", lit(nowTs))
      .withColumn("etl_date", lit(etlDate))
      .createOrReplaceTempView("temp01")

    // Create the DWD Hudi table from the temp view (CTAS).
    // NOTE: fixed property key — was "hoodie.datasource.hive_aync.mode",
    // which Hudi silently ignores; the correct key is "hive_sync".
    spark.sql(
      s"""
        |create table  if not exists dim_user_info
        |using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="operate_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date )
        |as
        |select * from temp01
        |""".stripMargin)

    // Tear down the Spark session.
    spark.close()
  }

}
