package Hudi_SelfStudy.spark_sql_study

import org.apache.spark.sql.SparkSession

object study_01 {

  def main(args: Array[String]): Unit = {

    // Build a local SparkSession wired for Hudi:
    //  - Kryo serialization is required by Hudi's record payloads.
    //  - HoodieSparkSessionExtension enables Hudi's Spark SQL DDL/DML syntax.
    //  - Hive support so the table is registered in the Hive metastore.
    val session = SparkSession
      .builder()
      .master("local[*]")
      .appName("创建非分区mor表")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Work inside the target database.
    session.sql("use hudi_study")

    // DDL for a non-partitioned Hudi merge-on-read table.
    // tblproperties:
    //   type           -> 'mor' = merge-on-read storage type
    //   primaryKey     -> record key column used by Hudi for upserts
    //   preCombineField-> column used to pick the latest record when keys collide
    val createMorTableDdl =
      """
        |create table hudi_mor_tbl(
        |id int,
        |name string,
        |price double,
        |ts bigint
        |)using hudi
        |tblproperties(
        |type='mor',
        |primaryKey='id',
        |preCombineField='ts'
        |)
        |""".stripMargin

    session.sql(createMorTableDdl)

    // Sanity check: list the tables in the current database.
    session.sql("show tables").show

    // Release the Spark session and its resources.
    session.close()
  }

}
