package com.hrt.hudi.spark

import org.apache.hudi.{DataSourceReadOptions, DataSourceWriteOptions}
import org.apache.hudi.config.HoodieWriteConfig
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.serializer.KryoSerializer

/**
 * Operate on Hudi data with a MOR (Merge-On-Read) table: write JSON records,
 * then read them back with a read-optimized query.
 */
object DeleteDataFromHudi {

  /** Base path of the Hudi table; used for both the write and the read below. */
  private val TablePath = "/hudi_data/person_infos"

  /**
   * Reads a local JSON file, appends its records into a Hudi MOR table,
   * then runs a read-optimized query over the table and prints the result.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val session: SparkSession = SparkSession.builder()
      .appName("test")
      // Hudi requires Kryo serialization; reference the imported class instead
      // of a hand-typed FQCN string so the compiler catches typos/renames.
      .config("spark.serializer", classOf[KryoSerializer].getName)
      .master("local")
      .getOrCreate()

    try {
      // NOTE(review): the path mixes a file:// URI scheme with Windows
      // backslashes — works on Windows JVMs but is non-portable; confirm
      // before reusing elsewhere.
      val df: DataFrame = session.read.json("file:///F:\\P8\\workspace\\bigdatalake\\data\\jsondata.json")
      // Alternative demo inputs (update/append/delete scenarios):
      //   .../data/updatedata.json
      //   .../data/appendData1.json
      //   .../data/deleteData.json

      df.write.format("hudi")
        .option(DataSourceWriteOptions.TABLE_TYPE_OPT_KEY, DataSourceWriteOptions.MOR_TABLE_TYPE_OPT_VAL)
        .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "id")
        .option(DataSourceWriteOptions.PARTITIONPATH_FIELD_OPT_KEY, "loc")
        .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "data_dt")
        // Small shuffle parallelism keeps this local demo fast.
        .option("hoodie.upsert.shuffle.parallelism", "2")
        .option("hoodie.insert.shuffle.parallelism", "2")
        .option("hoodie.delete.shuffle.parallelism", "2")
        // Compact inline after every delta commit so changes become visible
        // to the read-optimized query below immediately.
        .option("hoodie.compact.inline", "true")
        .option("hoodie.cleaner.commits.retained", "1")
        .option("hoodie.compact.inline.max.delta.commits", "1")
        .option(HoodieWriteConfig.TABLE_NAME, "person_infos")
        .mode(SaveMode.Append)
        .save(TablePath)

      // Read-optimized query: reads only compacted base files of the MOR table.
      val frame: DataFrame = session.read.format("hudi")
        .option(DataSourceReadOptions.QUERY_TYPE_OPT_KEY, DataSourceReadOptions.QUERY_TYPE_READ_OPTIMIZED_OPT_VAL)
        .load(s"$TablePath/*/*")
      frame.show()
    } finally {
      // Release the SparkSession even if the write or read fails.
      session.stop()
    }
  }

}
