package ds_industry_2025.ds

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, date_format, lit, to_timestamp, when}

import java.text.SimpleDateFormat
import java.util.Calendar

// TODO: e-commerce exercise (Hudi tables) — create the dwd-layer tables
object hudi_dwd_table_perparation {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t1")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Run timestamp stamped into the dwd_insert_time / dwd_modify_time audit columns.
    val current_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(Calendar.getInstance().getTime)

    // Batch partition value written to every dwd row. Hoisted so a new batch
    // date is changed in exactly one place.
    val etl_date = "20241104"
    // Hive warehouse root on HDFS; ods/dwd table paths are derived from it.
    val warehouse = "hdfs://192.168.40.110:9000/user/hive/warehouse"

    /**
     * Drops (if present) and creates one dwd-layer Hudi COW table.
     * All dwd tables share the same four audit columns, the same tblproperties
     * and the same etl_date partition column, so only the table-specific
     * business columns and the pre-combine field vary.
     *
     * NOTE: the original code wrote the property as
     * "hoodie.datasource.hive_aync.mode" — a typo for "hive_sync", which made
     * the setting a no-op. Fixed here.
     *
     * @param name        table name (created in the current database)
     * @param pre_combine Hudi preCombineField used for record deduplication
     * @param columns     newline-separated business column definitions
     */
    def create_hudi_table(name: String, pre_combine: String, columns: String): Unit = {
      spark.sql(s"drop table if exists $name")
      spark.sql(
        s"""
           |create table if not exists $name(
           |$columns,
           |dwd_insert_user String,
           |dwd_insert_time timestamp,
           |dwd_modify_user String,
           |dwd_modify_time timestamp
           |)using hudi
           |tblproperties(
           |type="cow",
           |primaryKey="id",
           |preCombineField="$pre_combine",
           |hoodie.datasource.hive_sync.mode="hms"
           |)
           |partitioned by(etl_date String)
           |""".stripMargin)
      println(s"${name}表创建完成")
    }

    //  todo Recreate the dwd_ds_hudi database and all dim/fact tables.
    def create_database_table(): Unit = {
      //  todo Dropping the database does not remove the Hudi data files on
      //  HDFS; those still have to be deleted manually.
      spark.sql("drop database if exists dwd_ds_hudi cascade")
      spark.sql("create database if not exists dwd_ds_hudi")
      spark.sql("use dwd_ds_hudi")

      //  dim_user_info
      create_hudi_table("dim_user_info", "operate_time",
        """id int,
          |login_name string,
          |nick_name string,
          |passwd string,
          |name string,
          |phone_num string,
          |email string,
          |head_img string,
          |user_level string,
          |birthday timestamp,
          |gender string,
          |create_time timestamp,
          |operate_time timestamp""".stripMargin)

      //  dim_sku_info
      create_hudi_table("dim_sku_info", "dwd_modify_time",
        """id int,
          |spu_id int,
          |price decimal(10,0),
          |sku_name string,
          |sku_desc string,
          |weight decimal(10,2),
          |tm_id int,
          |category3_id int,
          |sku_default_img string,
          |create_time timestamp""".stripMargin)

      //  dim_province
      create_hudi_table("dim_province", "dwd_modify_time",
        """id int,
          |name string,
          |region_id string,
          |area_code string,
          |iso_code string,
          |create_time timestamp""".stripMargin)

      //  dim_region
      create_hudi_table("dim_region", "dwd_modify_time",
        """id string,
          |region_name string,
          |create_time timestamp""".stripMargin)

      //  fact_order_info
      create_hudi_table("fact_order_info", "operate_time",
        """id int,
          |consignee string,
          |consignee_tel string,
          |final_total_amount decimal(16,2),
          |order_status string,
          |user_id int,
          |delivery_address string,
          |order_comment string,
          |out_trade_no string,
          |trade_body string,
          |create_time timestamp,
          |operate_time timestamp,
          |expire_time timestamp,
          |tracking_no string,
          |parent_order_id int,
          |img_url string,
          |province_id int,
          |benefit_reduce_amount decimal(16,2),
          |original_total_amount decimal(16,2),
          |feight_fee decimal(16,2)""".stripMargin)

      //  fact_order_detail
      create_hudi_table("fact_order_detail", "dwd_modify_time",
        """id int,
          |order_id int,
          |sku_id int,
          |sku_name string,
          |img_url string,
          |order_price decimal(10,2),
          |sku_num string,
          |create_time timestamp,
          |source_type string,
          |source_id int""".stripMargin)
    }

    /**
     * todo Copies one ods table's current batch partition into its dwd
     * counterpart, adding the four audit columns.
     *
     * This replaces the three near-identical functions ods_to_dwd /
     * ods_to_dwd02 / ods_to_dwd03 of the original: 02 differed only by the
     * operate_time backfill (needed by fact_order_info for later metric
     * calculations) and 03 was an exact duplicate of the base version.
     *
     * @param ods_name          source table under ods_ds_hudi.db
     * @param dwd_name          target table under dwd_ds_hudi.db
     * @param precombine_field  Hudi pre-combine field for deduplication
     * @param fill_operate_time when true, a null operate_time is backfilled
     *                          from create_time before writing
     */
    def ods_to_dwd(ods_name: String, dwd_name: String, precombine_field: String,
                   fill_operate_time: Boolean = false): Unit = {
      val ods_path = s"$warehouse/ods_ds_hudi.db/${ods_name}"
      val dwd_path = s"$warehouse/dwd_ds_hudi.db/${dwd_name}"

      val source = spark.read.format("hudi").load(ods_path)
        .where(col("etl_date") === etl_date)

      // Only fact_order_info needs operate_time coalesced with create_time.
      val prepared =
        if (fill_operate_time)
          source.withColumn(
            "operate_time",
            when(col("operate_time").isNull, col("create_time")).otherwise(col("operate_time"))
          )
        else source

      prepared
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", to_timestamp(lit(current_time)))
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", to_timestamp(lit(current_time)))
        .withColumn("etl_date", lit(etl_date))
        .write.format("hudi").mode("append")
        .options(getQuickstartWriteConfigs)
        .option(RECORDKEY_FIELD.key(), "id")
        .option(PRECOMBINE_FIELD.key(), precombine_field)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", dwd_name)
        .save(dwd_path)
      println(s"${dwd_name}表数据写入完成")
    }

    //  todo Create the database and tables.
    create_database_table()
    //  todo Load the data.
    ods_to_dwd("user_info", "dim_user_info", "operate_time")
    ods_to_dwd("sku_info", "dim_sku_info", "dwd_modify_time")
    ods_to_dwd("base_province", "dim_province", "dwd_modify_time")
    ods_to_dwd("base_region", "dim_region", "dwd_modify_time")
    ods_to_dwd("order_info", "fact_order_info", "operate_time", fill_operate_time = true)
    ods_to_dwd("order_detail", "fact_order_detail", "dwd_modify_time")

    spark.close()
  }

}
