package com.zh.hudi

import com.zh.constants.Constants
import com.zh.util.{PropertiesUtils, SparkUtils}
import org.apache.hudi.DataSourceReadOptions
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

object SparkSQLSourceHudi {

  /**
   * Entry point: initializes Spark, resolves the Hudi table base path from
   * the application properties, and dumps the table contents via Spark SQL.
   *
   * Expected arguments:
   *   args(0) = properties file path
   *   args(1) = Spark application name
   *
   * @param args command-line arguments, parsed by [[SparkUtils.argsHandle]]
   */
  def main(args: Array[String]): Unit = {
    // Impersonate the "spark" user for HDFS access.
    System.setProperty("HADOOP_USER_NAME", "spark")

    val sparkConf = new SparkConf()

    // Destructure instead of tuple-field access: _1 = config path, _2 = app name.
    val (confName, appName) = SparkUtils.argsHandle(args)

    val spark = SparkUtils.initSpark(appName, sparkConf)

    // NOTE(review): confName (the properties file path from args) is never
    // handed to PropertiesUtils — verify whether PropertiesUtils must be
    // initialized with it before getValue can resolve keys.
    val hudiBasePath: String = PropertiesUtils.getValue(Constants.HUDI_TABLE_BASEPATH)

    readfromHudi(spark, hudiBasePath)
  }

  /**
   * Loads the Hudi table under `basePath` as a snapshot, registers it as a
   * temporary view, and prints its schema plus a sample of rows.
   *
   * @param spark    active SparkSession
   * @param basePath root path of the Hudi table on the filesystem
   */
  def readfromHudi(spark: SparkSession, basePath: String): Unit = {
    // Glob two levels below the base path — Hudi lays files out as
    // basePath/partition/files for single-level-partitioned tables.
    val hudiDf = spark.read
      .format("hudi")
      .options(hudiReadConfig)
      .load(s"$basePath/*/*")

    hudiDf.createOrReplaceTempView("t1")

    val df = spark.sql("select * from t1")

    df.printSchema()
    df.show()
  }

  /**
   * Hudi read (query) options.
   *
   * @return mutable map holding the Hudi query configuration; currently only
   *         the query type, set to snapshot (read the latest committed view)
   */
  def hudiReadConfig: mutable.Map[String, String] =
    mutable.Map(
      DataSourceReadOptions.QUERY_TYPE_OPT_KEY -> DataSourceReadOptions.QUERY_TYPE_SNAPSHOT_OPT_VAL
    )

}
