package industry_2024

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit

import java.util.Properties

object test {

  /**
   * Ad-hoc experiment: verify whether a Hudi COW table supports `timestamp`-typed columns.
   *
   * Observed result (from the original author's notes): creating the Hudi table with
   * timestamp columns works, but reading the timestamp data back from the Hive CLI
   * raises an error; reading the same table via Spark SQL (from the IDE or the
   * spark-sql CLI) works fine.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use test_gy")

    // Recreate the target table from scratch.
    // FIX: the drop statement previously referenced "changerecrod" (typo), so the real
    // table was never dropped and `create table if not exists` kept any stale schema.
    spark.sql("drop table if exists changerecord")
    // FIX: "preCombineFiled" -> "preCombineField" (the misspelled property is silently
    //      ignored by Hudi, leaving no precombine field configured).
    // FIX: "hoodie.datasource.hive_aync.mode" -> "hoodie.datasource.hive_sync.mode"
    //      (misspelled key is ignored, so HMS sync mode was never actually set).
    spark.sql(
      """
        |create table if not exists changerecord(
        |ChangeID int,
        |ChangeMachineID int,
        |ChangeMachineRecordID int,
        |ChangeRecordState string,
        |ChangeStartTime  timestamp,
        |ChangeEndTime timestamp,
        |ChangeRecordData timestamp,
        |ChangeHandleState int
        |) using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="ChangeID,ChangeMachineID",
        |preCombineField="ChangeEndTime",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etldate string)
        |""".stripMargin)

    // JDBC connection properties for the source MySQL database.
    // NOTE(review): credentials are hard-coded for this local experiment; move them to
    // configuration before any shared use.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    /**
     * Copies one MySQL table into a Hudi table on HDFS.
     *
     * @param mysql_name      source table name in the `shtd_industry` MySQL database
     * @param hive_name       target Hudi table name (also used as the HDFS directory name)
     * @param primarykey      comma-separated Hudi record-key column(s)
     * @param precombinefield column used to deduplicate records with the same key
     */
    def to_hive01(mysql_name: String, hive_name: String, primarykey: String, precombinefield: String): Unit = {
      val hdfs_path = s"hdfs://192.168.40.110:9000/user/hive/warehouse/test_gy.db/${hive_name}"
      spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_industry?useSSL=false", mysql_name, connect)
        // Static ETL batch date used as the Hudi partition value.
        .withColumn("etldate", lit("20240908"))
        .write.mode("append")
        .format("hudi")
        .options(getQuickstartWriteConfigs)
        .option(RECORDKEY_FIELD.key(), primarykey)
        .option(PRECOMBINE_FIELD.key(), precombinefield)
        .option(PARTITIONPATH_FIELD.key(), "etldate")
        .option("hoodie.table.name", hive_name)
        .option("hoodie.metadata.enable", true)
        .save(hdfs_path)
    }

    to_hive01("ChangeRecord", "changerecord", "ChangeID,ChangeMachineID", "ChangeEndTime")

    // Read back the timestamp column (the point of the experiment) and a plain int
    // column for comparison, straight from the Hudi dataset on HDFS.
    val path = s"hdfs://192.168.40.110:9000/user/hive/warehouse/test_gy.db/changerecord"
    spark.read.format("hudi").load(path).select("ChangeStartTime").show
    spark.read.format("hudi").load(path).select("ChangeID").show

    spark.close()
  }

}
