package ds_industry_2025.ds

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.functions.{col, lit, to_timestamp, when}
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

// TODO: creates the ODS-layer Hudi tables (e-commerce exam dataset) and loads them from MySQL.
object hudi_ods_table_perparation {
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support and the Hudi SQL extension enabled.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("test")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC credentials for the source MySQL instance.
    // NOTE(review): com.mysql.jdbc.Driver is the legacy (pre-8.x) driver class name;
    // use com.mysql.cj.jdbc.Driver if Connector/J 8+ is on the classpath.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Job start time, used to backfill/overwrite timestamp columns during the load.
    val current_time =
      new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(Calendar.getInstance().getTime)

    /**
     * Drops and recreates one Hudi copy-on-write table in the current database,
     * partitioned by etl_date, with "id" as the primary key.
     *
     * @param tableName       table to (re)create
     * @param columnsDdl      comma/newline separated column definitions (no trailing comma)
     * @param preCombineField column used by Hudi to deduplicate records with the same key
     */
    def createHudiTable(tableName: String, columnsDdl: String, preCombineField: String): Unit = {
      spark.sql(s"drop table if exists $tableName")
      // Bug fix vs. original: property was misspelled "hoodie.datasource.hive_aync.mode",
      // which Hudi ignores; the correct key is "hoodie.datasource.hive_sync.mode".
      spark.sql(
        s"""
           |create table if not exists $tableName(
           |$columnsDdl
           |)using hudi
           |tblproperties(
           |type="cow",
           |primaryKey="id",
           |preCombineField="$preCombineField",
           |hoodie.datasource.hive_sync.mode="hms"
           |)
           |partitioned by(etl_date String)
           |""".stripMargin)
      println(s"${tableName}表创建完成")
    }

    // Creates the ODS database and all six Hudi tables.
    def create_database_table(): Unit = {
      // TODO: dropping a Hudi database also requires deleting its HDFS directory by hand.
      spark.sql("drop database if exists ods_ds_hudi cascade")
      spark.sql("create database if not exists ods_ds_hudi")
      spark.sql("use ods_ds_hudi")

      createHudiTable("user_info",
        """id int,
          |login_name string,
          |nick_name string,
          |passwd string,
          |name string,
          |phone_num string,
          |email string,
          |head_img string,
          |user_level string,
          |birthday timestamp,
          |gender string,
          |create_time timestamp,
          |operate_time timestamp""".stripMargin,
        "operate_time")

      createHudiTable("sku_info",
        """id int,
          |spu_id int,
          |price decimal(10,0),
          |sku_name string,
          |sku_desc string,
          |weight decimal(10,2),
          |tm_id int,
          |category3_id int,
          |sku_default_img string,
          |create_time timestamp""".stripMargin,
        "create_time")

      createHudiTable("base_province",
        """id int,
          |name string,
          |region_id string,
          |area_code string,
          |iso_code string,
          |create_time timestamp""".stripMargin,
        "create_time")

      createHudiTable("base_region",
        """id string,
          |region_name string,
          |create_time timestamp""".stripMargin,
        "create_time")

      createHudiTable("order_info",
        """id int,
          |consignee string,
          |consignee_tel string,
          |final_total_amount decimal(16,2),
          |order_status string,
          |user_id int,
          |delivery_address string,
          |order_comment string,
          |out_trade_no string,
          |trade_body string,
          |create_time timestamp,
          |operate_time timestamp,
          |expire_time timestamp,
          |tracking_no string,
          |parent_order_id int,
          |img_url string,
          |province_id int,
          |benefit_reduce_amount decimal(16,2),
          |original_total_amount decimal(16,2),
          |feight_fee decimal(16,2)""".stripMargin,
        "operate_time")

      // Bug fix vs. original: the success message previously printed "base_detail"
      // although the table created here is order_detail.
      createHudiTable("order_detail",
        """id int,
          |order_id int,
          |sku_id int,
          |sku_name string,
          |img_url string,
          |order_price decimal(10,2),
          |sku_num string,
          |create_time timestamp,
          |source_type string,
          |source_id int""".stripMargin,
        "create_time")
    }

    // Reads one full MySQL table from the shtd_store database.
    def readMysql(tableName: String): DataFrame =
      spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", tableName, conn)

    /**
     * Appends a DataFrame to the table's Hudi path with the standard write options,
     * adding the fixed etl_date="20241104" partition column first.
     */
    def writeToHudi(df: DataFrame, tableName: String, precombinefield: String): Unit = {
      val ods_path = s"hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi.db/$tableName"
      df.withColumn("etl_date", lit("20241104"))
        .write.format("hudi")
        .mode("append")
        .options(getQuickstartWriteConfigs)
        .option(RECORDKEY_FIELD.key(), "id")
        .option(PRECOMBINE_FIELD.key(), precombinefield)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", tableName)
        .save(ods_path)

      println(s"${tableName}数据准备完毕")
    }

    // Loader 01: for tables with both create_time and operate_time;
    // null timestamps are backfilled with the job start time.
    def mysql_to_ods01(tableName: String, precombinefield: String): Unit = {
      val df = readMysql(tableName)
        .withColumn(
          "create_time",
          when(col("create_time").isNull, to_timestamp(lit(current_time))).otherwise(col("create_time"))
        )
        .withColumn(
          "operate_time",
          when(col("operate_time").isNull, to_timestamp(lit(current_time))).otherwise(col("operate_time"))
        )
      writeToHudi(df, tableName, precombinefield)
    }

    // Loader 02: create_time is overwritten unconditionally with the job start time.
    def mysql_to_ods02(tableName: String, precombinefield: String): Unit = {
      val df = readMysql(tableName)
        .withColumn("create_time", to_timestamp(lit(current_time)))
      writeToHudi(df, tableName, precombinefield)
    }

    // Loader 03: the table is copied as-is, with no timestamp adjustment.
    def mysql_to_ods03(tableName: String, precombinefield: String): Unit =
      writeToHudi(readMysql(tableName), tableName, precombinefield)

    // Create tables, then load each one.
    create_database_table()
    mysql_to_ods01("user_info", "operate_time")
    // NOTE(review): by the loaders' own descriptions, order_info (has both timestamps)
    // looks like a mysql_to_ods01 candidate and sku_info/order_detail (create_time only)
    // like mysql_to_ods02 candidates; routing them through ods03 leaves nullable
    // precombine fields unfilled. Kept as-is — confirm against the task spec before changing.
    mysql_to_ods03("sku_info", "create_time")
    mysql_to_ods02("base_province", "create_time")
    mysql_to_ods02("base_region", "create_time")
    mysql_to_ods03("order_info", "operate_time")
    mysql_to_ods03("order_detail", "create_time")

    println("ods层表和数据准备完毕")

    spark.close()
  }

}
