package cn.doitedu.hudi

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD,  PRECOMBINE_FIELD,  RECORDKEY_FIELD}
import org.apache.spark.sql.{SaveMode, SparkSession}

import scala.collection.mutable

object Demo {

  /**
   * Writes a small in-memory DataFrame to a Hudi table on HDFS as a demo.
   *
   * Fixes over the original:
   *  - Hadoop-side options set through `SparkSession.builder().config` must be
   *    prefixed with "spark.hadoop." to be copied into the Hadoop
   *    `Configuration`; a bare "fs.defaultFS" key only lands in the Spark conf
   *    and never reaches the HDFS client.
   *  - `appName` was an empty string.
   *  - The demo write options are an immutable `Map` literal instead of a
   *    mutable HashMap mutated in place.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      // "spark.hadoop."-prefixed keys are forwarded into the Hadoop
      // Configuration; an unprefixed "fs.defaultFS" would be ignored by HDFS.
      .config("spark.hadoop.fs.defaultFS", "hdfs://doit01:8020/")
      // Kryo serialization is required by Hudi's Spark integration.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.warehouse.dir", "hdfs://doit01:8020/hudi_dw/")
      .appName("hudi-write-demo")
      .master("local")
      .getOrCreate()

    import spark.implicits._

    // Fixture rows: (id, name, age, partition date) — all in one partition.
    val df = spark
      .createDataset(Seq(
        (1, "a", 10, "2021-10-10"),
        (2, "b", 20, "2021-10-10"),
        (3, "c", 30, "2021-10-10"),
        (4, "d", 40, "2021-10-10")))
      .toDF("id", "name", "age", "dt")

    // Keep shuffle parallelism low for a local single-node demo run.
    val demoConfigs: Map[String, String] = Map(
      "hoodie.insert.shuffle.parallelism"     -> "2",
      "hoodie.upsert.shuffle.parallelism"     -> "2",
      "hoodie.bulkinsert.shuffle.parallelism" -> "2",
      "hoodie.delete.shuffle.parallelism"     -> "2")

    df.write.format("hudi")
      .options(demoConfigs)
      // NOTE(review): precombining on "id" (same column as the record key)
      // dedupes by the key itself; normally a timestamp/version column goes
      // here — confirm intent before reusing this beyond the demo.
      .option(PRECOMBINE_FIELD.key(), "id")
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PARTITIONPATH_FIELD.key(), "dt")
      .option("hoodie.table.name", "hudi_01")
      .mode(SaveMode.Overwrite)
      .save("/hudi_01")

    spark.close()
  }

}
