package DianShang_2024.ds_02.extract

import com.sun.jmx.mbeanserver.Util.cast
import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, date_format, lit}

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

/** One-off data-preparation job: extracts tables from the `shtd_store`
  * MySQL database and writes them as Hudi tables into the Hive warehouse
  * database `ods_ds_hudi02` on HDFS.
  *
  * All timestamp columns are converted to formatted strings before the
  * Hudi write (this file's convention: Hudi time data is stored as text),
  * and every table gains a literal `etl_date` partition column.
  */
object data_preparation {
  def main(args: Array[String]): Unit = {
    // Spark session wired for Hudi: Kryo serialization and the Hudi SQL
    // extension are both required for Hudi datasource writes.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据抽取的数据准备")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use ods_ds_hudi02")

    // Single source of truth for the endpoints; previously these literals
    // were duplicated in every load below.
    val jdbcUrl = "jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false"
    val warehouseRoot = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi02.db"

    // JDBC connection properties for the source MySQL instance.
    val mysqlConnect = new Properties()
    mysqlConnect.setProperty("user", "root")
    mysqlConnect.setProperty("password", "123456")
    mysqlConnect.setProperty("driver", "com.mysql.jdbc.Driver")

    /** Copies one MySQL table into a same-named Hudi table (append mode).
      *
      * Assumes the source table has a `create_time` column, which is
      * reformatted to a "yyyy-MM-dd HH:mm:ss" string before the write.
      *
      * @param tableName       source table name; also the Hudi table name
      *                        and the target directory under the warehouse
      * @param primaryKey      column used as the Hudi record key
      * @param preCombineField column used as the Hudi pre-combine (dedup
      *                        ordering) field
      * @param etlDate         value for the `etl_date` partition column.
      *                        Defaults to "20240814" to preserve the original
      *                        behavior. NOTE(review): every other load in this
      *                        file uses "20240101" — confirm which batch date
      *                        is intended.
      */
    def to_hudi(tableName: String,
                primaryKey: String,
                preCombineField: String,
                etlDate: String = "20240814"): Unit = {
      // Target Hudi table location on HDFS.
      val hdfsPath = s"$warehouseRoot/$tableName"
      spark.read.jdbc(jdbcUrl, tableName, mysqlConnect)
        .withColumn("create_time", date_format(col("create_time"), "yyyy-MM-dd HH:mm:ss"))
        .withColumn("etl_date", lit(etlDate))
        .write.mode("append")
        .format("hudi")
        .options(getQuickstartWriteConfigs)
        .option(PRECOMBINE_FIELD.key(), preCombineField)
        .option(RECORDKEY_FIELD.key(), primaryKey)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", tableName)
        .save(hdfsPath)
    }

    // user_info: loaded inline rather than via to_hudi because it needs an
    // extra filter and two additional timestamp conversions.
    //  - operate_time can be NULL in the source, so NULL rows are dropped
    //    (operate_time is the pre-combine field and must be present).
    //  - per this file's convention, all time columns become strings.
    val userInfoPath = s"$warehouseRoot/user_info"
    spark.read.jdbc(jdbcUrl, "user_info", mysqlConnect)
      .filter(col("operate_time").isNotNull)
      .withColumn("birthday", date_format(col("birthday"), "yyyy-MM-dd"))
      .withColumn("create_time", date_format(col("create_time"), "yyyy-MM-dd HH:mm:ss"))
      .withColumn("operate_time", date_format(col("operate_time"), "yyyy-MM-dd HH:mm:ss"))
      .withColumn("etl_date", lit("20240101"))
      .write.mode("append")
      .format("hudi")
      .options(getQuickstartWriteConfigs)
      .option(PRECOMBINE_FIELD.key(), "operate_time")
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PARTITIONPATH_FIELD.key(), "etl_date")
      .option("hoodie.table.name", "user_info")
      .save(userInfoPath)

    //  sku_info
//    to_hudi("sku_info", "id", "create_time")

    //  base_province: the source table lacks create_time, so a synthetic
    //  one (current timestamp as string) is added before the write.
//    val base_province_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi02.db/base_province"
//    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "base_province", mysql_connect)
//      .withColumn(
//        "create_time",
//        lit(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()))
//        )
//      .withColumn("etl_date", lit("20240101"))
//      .write.mode("append")
//      .format("hudi")
//      .options(getQuickstartWriteConfigs)
//      .option(PRECOMBINE_FIELD.key(), "create_time")
//      .option(RECORDKEY_FIELD.key(), "id")
//      .option(PARTITIONPATH_FIELD.key(), "etl_date")
//      .option("hoodie.table.name", "base_province")
//      .save(base_province_path)

    //  base_region: id is cast to int, and a synthetic create_time is added.
//    val base_region_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi02.db/base_region"
//    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "base_region", mysql_connect)
//      .withColumn("id",col("id").cast("int"))
//      .withColumn(
//        "create_time",
//        lit(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()))
//      )
//      .withColumn("create_time",date_format(col("create_time"),"yyyy-MM-dd HH:mm:ss"))
//      .withColumn("etl_date", lit("20240101"))
//      .write.mode("append")
//      .format("hudi")
//      .options(getQuickstartWriteConfigs)
//      .option(PRECOMBINE_FIELD.key(), "create_time")
//      .option(RECORDKEY_FIELD.key(), "id")
//      .option(PARTITIONPATH_FIELD.key(), "etl_date")
//      .option("hoodie.table.name", "base_region")
//      .save(base_region_path)

    //  order_info: three timestamp columns to convert.
//    val order_info_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi02.db/order_info"
//    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "order_info", mysql_connect)
//      .withColumn("create_time",date_format(col("create_time"),"yyyy-MM-dd HH:mm:ss"))
//      .withColumn("operate_time",date_format(col("operate_time"),"yyyy-MM-dd HH:mm:ss"))
//      .withColumn("expire_time",date_format(col("expire_time"),"yyyy-MM-dd HH:mm:ss"))
//      .withColumn("etl_date", lit("20240101"))
//      .write.mode("append")
//      .format("hudi")
//      .options(getQuickstartWriteConfigs)
//      .option(PRECOMBINE_FIELD.key(), "operate_time")
//      .option(RECORDKEY_FIELD.key(), "id")
//      .option(PARTITIONPATH_FIELD.key(), "etl_date")
//      .option("hoodie.table.name", "order_info")
//      .save(order_info_path)

    //  order_detail
//    val order_detail_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi02.db/order_detail"
//    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "order_detail", mysql_connect)
//      .withColumn("create_time",date_format(col("create_time"),"yyyy-MM-dd HH:mm:ss"))
//      .withColumn("etl_date", lit("20240101"))
//      .write.mode("append")
//      .format("hudi")
//      .options(getQuickstartWriteConfigs)
//      .option(PRECOMBINE_FIELD.key(), "create_time")
//      .option(RECORDKEY_FIELD.key(), "id")
//      .option(PARTITIONPATH_FIELD.key(), "etl_date")
//      .option("hoodie.table.name", "order_detail")
//      .save(order_detail_path)

    spark.close()
  }

}
