package Hudi_SelfStudy.spark_sql_study

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession

object study_04 {

  /**
   * Test program: create a Hudi (copy-on-write) table through Spark SQL,
   * write a small test dataset into its HDFS location with the DataFrame
   * writer, and read it back.
   *
   * Side effects: drops/recreates `hudi_study.test_table`, writes files
   * under the hard-coded HDFS path, and prints the query result to stdout.
   */
  def main(args: Array[String]): Unit = {

    // Build a SparkSession with the Hudi integration enabled:
    // - Kryo serialization is required by Hudi for performance/correctness
    // - HoodieSparkSessionExtension enables Hudi DDL/DML in Spark SQL
    // - Hive support so the table is registered in the Hive metastore
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("测试学长的")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    try {
      spark.sql("use hudi_study")

      // Drop the test table if it already exists so the run is repeatable.
      spark.sql("drop  table if exists test_table")

      // Create the test table as a Hudi copy-on-write table.
      // Fixed: property was misspelled "hive_aync" — the correct key is
      // "hoodie.datasource.hive_sync.mode"; with the typo, HMS-based Hive
      // sync was silently never configured.
      spark.sql(
        """
          |create table test_table (
          |id int,
          |operate_time String,
          |name String
          |)using hudi
          |tblproperties(
          |type="cow",
          |primaryKey="id",
          |preCombineField='operate_time',
          |hoodie.datasource.hive_sync.mode='hms'
          |)
          |partitioned by(etl_date String)
          |""".stripMargin)

      // HDFS location of the table (must match the warehouse path used by
      // the `create table` above so the SQL table and the path-based write
      // refer to the same dataset).
      val path = "hdfs://192.168.40.110:9000/user/hive/warehouse/hudi_study.db/test_table"

      // Small in-memory test dataset: (id, operate_time, name, etl_date).
      val data = Seq(
        (1, "2024-05-09 10:00:00", "张三", "2024-05-09"),
        (2, "2024-05-09 11:00:00", "李四", "2024-05-09"),
        (3, "2024-05-09 12:00:00", "Charlie", "2024-05-09")
      )

      // Convert the tuples into a DataFrame with named columns.
      val df = spark.createDataFrame(data).toDF("id", "operate_time", "name", "etl_date")

      // Append the rows into the Hudi table. The record key / precombine /
      // partition-path options must agree with the tblproperties declared
      // in the `create table` statement above.
      df.write
        .format("hudi")
        .mode("append")
        .option("hoodie.datasource.write.table.name", "test_table")
        .options(getQuickstartWriteConfigs)
        .option(PRECOMBINE_FIELD.key(), "operate_time")
        .option(RECORDKEY_FIELD.key(), "id")
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", "test_table")
        .save(path)

      // Read back and display the inserted rows as a sanity check.
      spark.sql(
        """
          |select * from test_table
          |""".stripMargin).show

    } finally {
      // Always stop the Spark session, even if a step above throws.
      spark.close()
    }
  }

}
