package ds_industry_2025.ds

import org.apache.spark.sql.functions.{col, date_format, lit, to_timestamp, when}
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.util.Calendar

object hive__dwd_table_perparation {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("hive卷子的dwd层的表格准备")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("create database if not exists dwd")
    spark.sql("use dwd")

    // Audit timestamp stamped onto every dwd row at load time.
    val current_time =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(Calendar.getInstance().getTime)

    /**
     * Copy one ods table into dwd, adding the standard audit columns
     * (dwd_insert_user/time, dwd_modify_user/time) and repartitioning by a
     * fixed etl_date of "20241104". The target table is dropped first and
     * recreated by saveAsTable from the DataFrame schema.
     *
     * Consolidates the former ods_to_dwd / ods_to_dwd02 / ods_to_dwd03
     * triplet: 02 differed only by the operate_time backfill (exposed here as
     * `fillOperateTime`), and 03 differed only by using mode "append" right
     * after a drop — which is equivalent to "overwrite" on a fresh table.
     *
     * @param ods_name        source table name inside the `ods` database
     * @param dwd_name        target table name inside the `dwd` database
     * @param fillOperateTime when true, null `operate_time` values are
     *                        backfilled from `create_time` (order_info load)
     */
    def ods_to_dwd(ods_name: String, dwd_name: String, fillOperateTime: Boolean = false): Unit = {
      spark.sql(s"drop table if exists dwd.$dwd_name")

      val base = spark.table(s"ods.$ods_name").drop("etl_date")
      val withOperate =
        if (fillOperateTime)
          base.withColumn(
            "operate_time",
            when(col("operate_time").isNull, col("create_time")).otherwise(col("operate_time"))
          )
        else base

      withOperate
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", lit(current_time).cast("timestamp"))
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", lit(current_time).cast("timestamp"))
        .withColumn("etl_date", lit("20241104"))
        .write.mode("overwrite") // target was just dropped, so this is a plain create
        .partitionBy("etl_date")
        .saveAsTable(s"dwd.$dwd_name")
      println(s"${dwd_name}写入完成")
    }

    // Dimension tables: straight copy plus audit columns.
    ods_to_dwd("user_info", "dim_user_info")
    ods_to_dwd("sku_info", "dim_sku_info")
    ods_to_dwd("base_province", "dim_province")
    ods_to_dwd("base_region", "dim_region")

    println("数据写入dwd完成")
    println("接下来开始创建fact_order_info和fact_order_detail表")

    // NOTE(review): these hand-written DDLs are immediately superseded — the
    // fact loads below drop the table and let saveAsTable recreate it from
    // the DataFrame schema. Kept to preserve the script's original side-effect
    // sequence; consider insertInto against these tables if the explicit
    // decimal/timestamp schema must be enforced.
    spark.sql("drop table if exists dwd.fact_order_info")
    spark.sql(
      """
        |create table if not exists dwd.fact_order_info(
        |id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2),
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)

    spark.sql("drop table if exists dwd.fact_order_detail")
    spark.sql(
      """
        |create table if not exists dwd.fact_order_detail(
        |id int,
        |order_id int,
        |sku_id int,
        |sku_name string,
        |img_url string,
        |order_price decimal(10,2),
        |sku_num string,
        |create_time timestamp,
        |source_type string,
        |source_id int,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)

    println("表格创建完成")

    // Fact tables: order_info additionally backfills null operate_time
    // from create_time before the audit columns are added.
    ods_to_dwd("order_info", "fact_order_info", fillOperateTime = true)
    ods_to_dwd("order_detail", "fact_order_detail")

    spark.close()
  }

}
