package Hudi_SelfStudy.spark_sql_study

import org.apache.spark.sql.SparkSession
import org.apache.hudi.DataSourceWriteOptions._

object cast_to_create_table {

  /** Entry point: demonstrates creating Hudi tables via CTAS through Spark SQL.
    *
    * Builds a local SparkSession wired up for Hudi (Kryo serializer plus the
    * Hudi Spark session extension), switches to the `hudi_study` database,
    * then issues two CTAS statements:
    *   1. a COW non-partitioned table without a preCombineField
    *   2. a COW partitioned table with an explicit preCombineField
    */
  def main(args: Array[String]): Unit = {
    // Prepare the Spark SQL environment integrated with Hudi.
    val session = SparkSession
      .builder()
      .master("local[*]")
      .appName("test")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    session.sql("use hudi_study")

    // CTAS: COW non-partitioned table, no preCombineField specified.
    val ctasNonPartitioned =
      """
        |create table hudi_ctas_cow_nonpcf_tb1
        |using hudi
        |tblproperties (primaryKey='id')
        |as
        |select
        |1 as id,
        |'a1' as name,
        |10 as price
        |""".stripMargin

    // CTAS: COW partitioned table with preCombineField specified.
    val ctasPartitioned =
      """
        |create table hudi_ctas_cow_pt_tb1
        |using hudi
        |tblproperties (type='cow',primaryKey='id',preCombineField='ts')
        |partitioned by (dt)
        |as
        |select
        |1 as id,
        |'a1' as name,
        |10 as price,
        |1000 as ts,
        |'2021-12-01' as dt
        |""".stripMargin

    session.sql(ctasNonPartitioned)
    session.sql(ctasPartitioned)

    // CTAS loading data from another table (not yet implemented).

    session.close()
  }

}
