package DianShang_2024.ds_06.clean

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, date_format, date_sub, lit}

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

object test {
  def main(args: Array[String]): Unit = {
    //  Set up a SparkSession with Hudi + Hive integration.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("准备数据")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kryo serializer is required by Hudi for record payload serialization.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Enables Hudi SQL syntax (e.g. `create table ... using hudi`).
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    //  JDBC connection properties for the source MySQL database.
    //  NOTE(review): credentials are hard-coded; consider externalizing to
    //  configuration or a secrets store before production use.
    val mysql_jdbc = new Properties()
    mysql_jdbc.setProperty("user", "root")
    mysql_jdbc.setProperty("password", "123456")
    mysql_jdbc.setProperty("driver", "com.mysql.jdbc.Driver")

    //  Test table: create test_table01 as a copy-on-write Hudi table,
    //  partitioned by etl_date, with `id` as the record key and
    //  `operate_time` as the pre-combine (dedup) field.
    spark.sql("use ods_ds_hudi")
    spark.sql("drop table if exists test_table01")
    spark.sql(
      """
        |create table test_table01(
        |id int,
        |login_name String,
        |nick_name String,
        |passwd String,
        |name String,
        |phone_num String,
        |email String,
        |head_img String,
        |user_level String,
        |birthday String,
        |gender String,
        |create_time String,
        |operate_time String
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="operate_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)
    //  BUG FIX: the property was previously misspelled "hive_aync.mode",
    //  so Hive Metastore sync was never configured — which is why partitions
    //  had to be repaired manually before data was visible (see note below).

    //  Load user_info from MySQL, normalize the date/time columns, and
    //  append the rows into the Hudi table's HDFS path.
    val user_info_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi.db/test_table01"
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "user_info", mysql_jdbc)
      .withColumn("etl_date", lit("20231027"))
      // operate_time is the pre-combine field; Hudi cannot dedupe on null,
      // so rows without it are dropped.
      .filter(col("operate_time").isNotNull)
      .withColumn("birthday", date_format(col("birthday"), "yyyy-MM-dd"))
      .withColumn("create_time", date_format(col("create_time"), "yyyy-MM-dd HH:mm:ss"))
      .withColumn("operate_time", date_format(col("operate_time"), "yyyy-MM-dd HH:mm:ss"))
      .write.mode("append")
      .format("hudi")
      .options(getQuickstartWriteConfigs)
      .option(PRECOMBINE_FIELD.key(), "operate_time")
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PARTITIONPATH_FIELD.key(), "etl_date")
      .option("hoodie.table.name", "test_table01")
      .option("hoodie.metadata.enable", "true")
      .save(user_info_path)

    //  Author's observation (translated): toggling hoodie.metadata.enable
    //  made no visible difference; partitions still needed repair before the
    //  data showed up. NOTE(review): likely caused by the hive_sync typo
    //  fixed above — re-verify after the fix.

    spark.close()
  }

}
