package cn.doitedu.hudi

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.collection.mutable

object Demo2 {

  /**
   * Minimal Spark-SQL-on-Hudi demo: creates a Hudi table on HDFS and
   * inserts a single row, running against a local master.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {

    // Hudi shuffle parallelism turned down for a small local demo run.
    // NOTE: Hadoop settings must use the "spark.hadoop." prefix — plain
    // "fs.defaultFS" on SparkConf is NOT forwarded to the Hadoop Configuration.
    val demoConfigs = Map(
      "hoodie.insert.shuffle.parallelism"     -> "2",
      "hoodie.upsert.shuffle.parallelism"     -> "2",
      "hoodie.bulkinsert.shuffle.parallelism" -> "2",
      "hoodie.delete.shuffle.parallelism"     -> "2",
      "spark.hadoop.fs.defaultFS"             -> "hdfs://doit01:8020/"
    )

    val conf = new SparkConf()
    conf.setAll(demoConfigs)

    val spark: SparkSession = SparkSession.builder()
      .config(conf)
      // Kryo is required by Hudi's Spark integration.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.warehouse.dir", "hdfs://doit01:8020/hudi_dw/")
      // Enables Hudi's SQL extensions (CREATE TABLE ... USING HUDI, etc.).
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .appName("HudiDemo2")
      .master("local")
      .getOrCreate()

    // Explicit column list so the table can be created at an empty location;
    // schema matches the 4-value INSERT below (assumed: id/name/age/dt —
    // TODO confirm intended column names). "if not exists" makes reruns safe.
    val createTableSql =
      """
        | create table if not exists hudi_01 (
        |    id int,
        |    name string,
        |    age int,
        |    dt string
        | ) using hudi
        | options (
        |    primaryKey = 'id',
        |    preCombineField = 'id'
        | )
        | location '/hudi_01/'
        |""".stripMargin

    spark.sql(createTableSql)

    spark.sql(
      """
        |
        |insert into hudi_01 select 5,'d',50,'2021-10-11'
        |
        |""".stripMargin)

    /*spark.sql(
      """
        |
        |select
        |*
        |from h_p1
        |
        |""".stripMargin).show(100,false)*/

    spark.close()
  }

}
