package DianShang_2024.ds_06.clean

import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Recreates the DWD-layer Hudi tables (dimensions + facts) in database
 * `dwd_ds_hudi` via Spark SQL.
 *
 * Fixes over the previous version:
 *  - table property key was misspelled `hoodie.datasource.hive_aync.mode`
 *    (Hudi ignores unknown keys, so Hive-metastore sync was never enabled);
 *    corrected to `hoodie.datasource.hive_sync.mode`.
 *  - the drop statement for the order-detail table referenced the misspelled
 *    name `fatc_order_detail`, so the real `fact_order_detail` was never
 *    dropped before `create table if not exists` — stale schemas survived
 *    reruns.
 *  - `fact_order_info` declared the audit column as `dwd_modfiy_time`;
 *    corrected to `dwd_modify_time`, consistent with every other table.
 */
object sql_to_create_hudiTable {

  //  Audit columns shared by every DWD table (always appended last,
  //  before the etl_date partition column).
  private val auditColumns: Seq[String] = Seq(
    "dwd_insert_user String",
    "dwd_insert_time String",
    "dwd_modify_user String",
    "dwd_modify_time String"
  )

  /**
   * Drops `table` if present, then creates it as a copy-on-write Hudi table
   * keyed on `id`, partitioned by `etl_date`, with Hive-metastore sync.
   *
   * @param spark           active session (current database must already be set)
   * @param table           table name
   * @param columns         business columns as "name type" strings; the shared
   *                        audit columns are appended automatically
   * @param preCombineField column Hudi uses to pick the winner on upsert
   */
  private def recreateHudiTable(spark: SparkSession,
                                table: String,
                                columns: Seq[String],
                                preCombineField: String): Unit = {
    spark.sql(s"drop table if exists $table")
    spark.sql(
      s"""
         |create table if not exists $table(
         |${(columns ++ auditColumns).mkString(",\n")}
         |) using hudi
         |tblproperties(
         |type = 'cow',
         |primaryKey = 'id',
         |preCombineField = '$preCombineField',
         |'hoodie.datasource.hive_sync.mode' = 'hms'
         |)
         |partitioned by (etl_date String)
         |""".stripMargin)
  }

  def main(args: Array[String]): Unit = {
    //  Spark session with Hudi + Hive integration enabled.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("准备数据")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd_ds_hudi")

    //  dim_user_info — deduplicated on operate_time.
    recreateHudiTable(spark, "dim_user_info",
      Seq(
        "id int",
        "login_name String",
        "nick_name String",
        "passwd String",
        "name String",
        "phone_num String",
        "email String",
        "head_img String",
        "user_level String",
        "birthday String",
        "gender String",
        "create_time String",
        "operate_time String"
      ),
      preCombineField = "operate_time")

    //  dim_sku_info
    recreateHudiTable(spark, "dim_sku_info",
      Seq(
        "id int",
        "spu_id int",
        "price decimal(10,0)",
        "sku_name String",
        "sku_desc String",
        "weight decimal(10,2)",
        "tm_id int",
        "category3_id int",
        "sku_default_img String",
        "create_time String"
      ),
      preCombineField = "dwd_modify_time")

    //  dim_province
    recreateHudiTable(spark, "dim_province",
      Seq(
        "id int",
        "name String",
        "region_id String",
        "area_code String",
        "iso_code String",
        "create_time String"
      ),
      preCombineField = "dwd_modify_time")

    //  dim_region
    recreateHudiTable(spark, "dim_region",
      Seq(
        "id String",
        "region_name String",
        "create_time String"
      ),
      preCombineField = "dwd_modify_time")

    //  fact_order_info — deduplicated on operate_time.
    recreateHudiTable(spark, "fact_order_info",
      Seq(
        "id int",
        "consignee String",
        "consignee_tel String",
        "final_total_amount decimal(16,2)",
        "order_status String",
        "user_id int",
        "delivery_address String",
        "order_comment String",
        "out_trade_no String",
        "trade_body String",
        "create_time String",
        "operate_time String",
        "expire_time String",
        "tracking_no String",
        "parent_order_id int",
        "img_url String",
        "province_id int",
        "benefit_reduce_amount decimal(16,2)",
        "original_total_amount decimal(16,2)",
        "feight_fee decimal(16,2)"
      ),
      preCombineField = "operate_time")

    //  fact_order_detail
    recreateHudiTable(spark, "fact_order_detail",
      Seq(
        "id int",
        "order_id int",
        "sku_id int",
        "sku_name String",
        "img_url String",
        "order_price decimal(10,2)",
        "sku_num String",
        "create_time String",
        "source_type String",
        "source_id int"
      ),
      preCombineField = "dwd_modify_time")

    //  Sanity check: list the tables that now exist.
    spark.sql("show tables").show

    //  Release the session.
    spark.close()
  }

}
