import org.apache.hudi.DataSourceWriteOptions._
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

/**
 * One-shot ETL job: full-load the MySQL table `shtd_store.base_region`
 * into a Hudi COW table `tt.base_region`, partitioned by `etl_date`
 * (yesterday's date, yyyyMMdd).
 */
object test {
  def main(args: Array[String]): Unit = {
    // Run HDFS writes as root (cluster has no kerberos; HADOOP_USER_NAME is
    // honored by the Hadoop client, not by SparkConf).
    System.setProperty("HADOOP_USER_NAME", "root")

    val sparkconf = new SparkConf().setMaster("local[*]").setAppName("aa")

    val spark = SparkSession.builder().config(sparkconf).config("HADOOP_USER_NAME", "root")
      // Hudi's SQL extension is required for the Hudi DDL below.
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      // Kryo is required by Hudi for serializing its internal records.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    // FIX: was `create database tt`, which throws AlreadyExistsException on re-run.
    spark.sql("create database if not exists tt")

    // todo base_region: recreate target table from scratch (full load).
    spark.sql("drop table if exists tt.base_region")
    spark.sql(
      """
        |create table if not exists tt.base_region(
        |id string,
        |region_name string,
        |create_time timestamp
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="create_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)
    // NOTE: fixed property key typo `hive_aync` -> `hive_sync`; the misspelled
    // key was silently ignored, so HMS sync never took effect.
    println("base_region表创建完成")

    // Full read of the source table over JDBC.
    val mysql_table = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("user", "root")
      .option("password", "123456")
      .option("dbtable", "base_region")
      .load()

    // Audit columns:
    //  - create_time: load time at second precision (preCombine field).
    //    FIX: `to_timestamp(current_timestamp(), fmt)` round-trips the
    //    timestamp through a string and can yield null under Spark 3's
    //    strict time parser (fractional seconds don't match the pattern);
    //    date_trunc expresses the intent directly and safely.
    //  - etl_date: partition value = yesterday, formatted yyyyMMdd.
    val data = mysql_table
      .withColumn("create_time", date_trunc("second", current_timestamp()))
      .withColumn("etl_date", date_format(date_sub(current_date(), 1), "yyyyMMdd"))

    // Upsert into the Hudi table path backing tt.base_region.
    data.write.format("hudi").mode("append")
      .options(getQuickstartWriteConfigs)
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PRECOMBINE_FIELD.key(), "create_time")
      .option(PARTITIONPATH_FIELD.key(), "etl_date")
      .option("hoodie.table.name", "base_region")
      .save("/user/hive/warehouse/tt.db/base_region")

    // Quick visual sanity check of the loaded data.
    data.show()
  }

}
