package org.example

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.apache.hudi.DataSourceWriteOptions._

/**
 * One-shot ETL job: ingests the MySQL table `ds_db01.customer_inf` into a
 * Hudi dataset under `/user/hive/warehouse/hudi_gy_ods.db/` and mirrors the
 * same data into the Hive table `hudi_gy_ods.customer`, partitioned by a
 * fixed `etl_date` batch column.
 *
 * Requires Hive support (`enableHiveSupport`) and the Hudi Spark bundle on
 * the classpath (see the copy instructions at the bottom of `main`).
 */
object hive1 {
  def main(args: Array[String]): Unit = {
    val sc = new SparkConf().setAppName("das")
    val spark = SparkSession.builder().config(sc).enableHiveSupport().getOrCreate()

    // Stage 1: pull the full source table from MySQL over JDBC.
    // NOTE(review): credentials are hardcoded — consider moving them to job
    // configuration or a secrets mechanism.
    // NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J class
    // name; `com.mysql.cj.jdbc.Driver` is its replacement in Connector/J 8.x.
    // Kept as-is because the driver jar version on the cluster is unknown.
    spark
      .read
      .format("jdbc")
      .option("url","jdbc:mysql://bigdata1:3306/ds_db01")
      .option("user","root")
      .option("password","123456")
      .option("driver","com.mysql.jdbc.Driver")
      .option("dbtable","customer_inf")
      .load()
      .createOrReplaceTempView("t1")

    val hudiBasePath = "/user/hive/warehouse/hudi_gy_ods.db"
    val tableName = "customer"

    // Stage 2: stamp every row with the batch date used as the partition key.
    spark
      .table("t1")
      .withColumn("etl_date",lit("20240416"))
      .createOrReplaceTempView("t2")

    // Stage 3: write to Hudi. `modified_time` resolves duplicates on the
    // record key `customer_id`; partition path comes from `etl_date`.
    spark.table("t2")
      .write
      .format("hudi")
      .option(PRECOMBINE_FIELD.key(),"modified_time")
      .option(RECORDKEY_FIELD.key(),"customer_id")
      .option(PARTITIONPATH_FIELD.key(),"etl_date")
      .option("hoodie.table.name",tableName)
      .partitionBy("etl_date")
      .mode("overwrite")
      .save(hudiBasePath)

    // FIX: use IF NOT EXISTS — the bare CREATE DATABASE threw an
    // AnalysisException on every rerun once the database existed, so the job
    // was not idempotent.
    spark.sql("create database if not exists hudi_gy_ods")

    // Stage 4: mirror the staged data into a plain Hive table so it is
    // queryable without the Hudi reader.
    spark
      .table("t2")
      .write
      .partitionBy("etl_date")
      .mode("overwrite")
      .saveAsTable("hudi_gy_ods.customer")

    spark.sql("show partitions hudi_gy_ods.customer").show()

    // Stage 5: sanity check — read the Hudi dataset back and preview 10 rows.
    spark
      .read
      .format("hudi")
      .load(hudiBasePath)
      .createOrReplaceTempView("hudi_tmp")

    spark.sql("select * from hudi_tmp").limit(10).show()

    // Deployment note: the Hudi Spark bundle must be on Spark's classpath:
    //cp /opt/module/hudi-0.11.0/packaging/hudi-spark-bundle/target/hudi-spark3.1-bundle_2.12-0.11.0.jar
    // /opt/module/spark-3.1.1-yarn/jars/
    //spark-shell --conf "spark.serializer=org.apache.spark.serializer.KryoSerializer"

    spark.stop()
  }
}
