package ods_industry_2024.ods_02.clean

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.hudi.org.apache.jetty.websocket.server.WebSocketHandler.Simple
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

import java.text.SimpleDateFormat
import java.util.Calendar

object table_perparation {
  /**
   * One-off preparation job for the `dwd_ds_hudi_02` database:
   *   1. Drops and re-creates the dimension / fact Hudi tables (COW, partitioned by etl_date).
   *   2. Copies the matching ODS tables into the dimension tables, adding the four
   *      dwd audit columns and yesterday's date as the etl_date partition value.
   *
   * Review fixes applied to the DDL strings (they are the bug, so the literals change):
   *   - dim_user_info: `type=="cow"` -> `type="cow"` (double `==` made the property invalid).
   *   - dim_sku_info: `preCombineKey` -> `preCombineField` (wrong property name; Hudi ignores it).
   *   - fact_order_info: column `dwd_modfiy_time` -> `dwd_modify_time` (typo; every sibling table
   *     and the get_data() writer use `dwd_modify_time`).
   *   - all tables: `hoodie.datasource.hive_aync[.mode]` -> `hoodie.datasource.hive_sync.mode`
   *     (misspelled / incomplete config key).
   *
   * NOTE(review): the `WebSocketHandler.Simple` import at the top of this file is unused
   * (accidental IDE auto-import from Hudi's shaded Jetty) and can be deleted.
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support; Kryo serialization and the Hudi SQL
    // extension are both required for Hudi's Spark SQL integration.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("表格准备")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd_ds_hudi_02")

    // current_time: "now", formatted for the dwd_insert_time audit column.
    // yesterday:    previous day as yyyyMMdd, used as the etl_date partition value.
    val day: Calendar = Calendar.getInstance()
    val current_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
    day.add(Calendar.DATE, -1)
    val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)

//    // Alternative approach: create each table with CTAS so the data is written in the same step.
//    def to_dwd(ods_name: String, dwd_name: String, primaryKey: String, preCombineField: String): Unit = {
//      spark.table(s"ods_ds_hudi_02.${ods_name}")
//        .drop("etl_date")
//        .withColumn("dwd_insert_user", lit("user1"))
//        .withColumn(
//          "dwd_insert_time",
//          to_timestamp(lit(current_time), "yyyy-MM-dd HH:mm:ss")
//        )
//        .withColumn("dwd_modify_user", lit("user1"))
//        .withColumn(
//          "dwd_modify_time",
//          to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")
//        )
//        .withColumn("etl_date", lit(yesterday))
//        .createOrReplaceTempView("ods")
//
//      spark.sql(s"drop table if exists ${dwd_name}")
//      spark.sql(
//        s"""
//          |create table if not exists ${dwd_name}
//          |using hudi
//          |tblproperties(
//          |type="cow",
//          |primaryKey='${primaryKey}',
//          |preCombineField='${preCombineField}',
//          |hoodie.table.name='${dwd_name}'
//          |)
//          |partitioned by(etl_date)
//          |as
//          |select * from ods
//          |""".stripMargin)
//    }
//
//    to_dwd("user_info", "dim_user_info", "id", "operate_time")
//    to_dwd("sku_info", "dim_sku_info", "id", "dwd_modify_time")
//    to_dwd("base_province", "dim_province", "id", "dwd_modify_time")
//    to_dwd("base_region", "dim_region", "id", "dwd_modify_time")

    //  dim_user_info — fixed `type=="cow"` and the hive_sync config key
    spark.sql("drop table if exists dim_user_info")
    spark.sql(
      """
        |create table if not exists dim_user_info(
        |id int,
        |login_name String,
        |nick_name String,
        |passwd String,
        |name String,
        |phone_num String,
        |email String,
        |head_img String,
        |user_level String,
        |birthday timestamp,
        |gender String,
        |create_time timestamp,
        |operate_time timestamp,
        |dwd_insert_user String,
        |dwd_insert_time timestamp,
        |dwd_modify_user String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="operate_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_sku_info — fixed `preCombineKey` -> `preCombineField` and the hive_sync config key
    spark.sql("drop table if exists dim_sku_info")
    spark.sql(
      """
        |create table if not exists dim_sku_info(
        |id int,
        |spu_id int,
        |price decimal(10,0),
        |sku_name String,
        |sku_desc String,
        |weight decimal(10,2),
        |tm_id int,
        |category3_id int,
        |sku_default_img String,
        |create_time timestamp,
        |dwd_insert_user String,
        |dwd_insert_time timestamp,
        |dwd_modify_user String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="dwd_modify_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_province — fixed the hive_sync config key
    spark.sql("drop table if exists dim_province")
    spark.sql(
      """
        |create table if not exists dim_province(
        |id int,
        |name String,
        |region_id String,
        |area_code String,
        |iso_code String,
        |create_time timestamp,
        |dwd_insert_user String,
        |dwd_insert_time timestamp,
        |dwd_modify_user  String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="dwd_modify_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_region — fixed the hive_sync config key
    spark.sql("drop table if exists dim_region")
    spark.sql(
      """
        |create table if not exists dim_region(
        |id String,
        |region_name String,
        |create_time timestamp,
        |dwd_insert_user String,
        |dwd_insert_time timestamp,
        |dwd_modify_user String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="dwd_modify_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  fact_order_info — fixed column typo `dwd_modfiy_time` and the hive_sync config key
    spark.sql("drop table if exists fact_order_info")
    spark.sql(
      """
        |create table if not exists fact_order_info(
        |id int,
        |consignee String,
        |consignee_tel String,
        |final_total_amount decimal(16,2),
        |order_status String,
        |user_id int,
        |delivery_address String,
        |order_comment String,
        |out_trade_no String,
        |trade_body String,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no String,
        |parent_order_id int,
        |img_url String,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2),
        |dwd_insert_user String,
        |dwd_insert_time timestamp,
        |dwd_modify_user String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="operate_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  fact_order_detail — fixed the hive_sync config key
    spark.sql("drop table if exists fact_order_detail")
    spark.sql(
      """
        |create table if not exists fact_order_detail(
        |id int,
        |order_id int,
        |sku_id int,
        |sku_name String,
        |img_url String,
        |order_price decimal(10,2),
        |sku_num String,
        |create_time timestamp ,
        |source_type String,
        |source_id int,
        |dwd_insert_user String,
        |dwd_insert_time timestamp ,
        |dwd_modify_user String,
        |dwd_modify_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="dwd_modify_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    spark.sql("show tables").show

    /**
     * Copies one ODS table into its DWD Hudi table.
     *
     * Drops the ODS etl_date, adds the four dwd audit columns (insert time = job start,
     * modify time = per-row current_timestamp), re-adds etl_date = yesterday, and appends
     * to the Hudi table path on HDFS with the given record key / precombine field.
     *
     * @param ods_name        source table name in ods_ds_hudi_02
     * @param dwd_name        target Hudi table name in dwd_ds_hudi_02
     * @param primaryKey      Hudi record key field
     * @param preCombineField Hudi precombine (ordering) field
     */
    def get_data(ods_name: String, dwd_name: String, primaryKey: String, preCombineField: String): Unit = {
      spark.table(s"ods_ds_hudi_02.${ods_name}")
        .drop("etl_date")
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn(
          "dwd_insert_time",
          to_timestamp(lit(current_time), "yyyy-MM-dd HH:mm:ss")
        )
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn(
          "dwd_modify_time",
          // date_format + to_timestamp round-trip truncates current_timestamp() to second precision
          to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")
        )
        .withColumn("etl_date", lit(yesterday))
        .write.mode("append")
        .format("hudi")
        .options(getQuickstartWriteConfigs)
        .option(RECORDKEY_FIELD.key(), primaryKey)
        .option(PRECOMBINE_FIELD.key(), preCombineField)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", dwd_name)
        .save(s"hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi_02.db/${dwd_name}")

      println(s"${dwd_name}表格完成")
    }

    // Load only the dimension tables; the fact tables are created empty here.
    get_data("user_info", "dim_user_info", "id", "operate_time")
    get_data("sku_info", "dim_sku_info", "id", "dwd_modify_time")
    get_data("base_province", "dim_province", "id", "dwd_modify_time")
    get_data("base_region", "dim_region", "id", "dwd_modify_time")

    // Sanity check: show the smallest dimension table.
    spark.table("dwd_ds_hudi_02.dim_region").show

    spark.close()
  }

}
