package DianShang_2024.ds_06.extract

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, date_format, desc, lit}

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

/**
 * One-off extraction job: pulls the `order_info` table from MySQL (shtd_store),
 * stamps each row with an `etl_date` partition value, and appends the result to
 * a COW Hudi table under the `ods_ds_hudi` Hive database on HDFS.
 *
 * Usage: optional first program argument overrides the etl_date partition value
 * (defaults to "20231027" for backward compatibility).
 */
object Repreparation_data {
  def main(args: Array[String]): Unit = {
    // etl_date partition value; allow override via args so the job can be re-run
    // for other dates without recompiling. Default preserves original behavior.
    val etlDate = args.headOption.getOrElse("20231027")

    // Build a SparkSession configured for Hudi + Hive integration.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据抽取第一题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kryo serialization is required by Hudi for performance/compatibility.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Enables Hudi's SQL extensions (CREATE TABLE ... USING hudi, etc.).
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use ods_ds_hudi")

    // Recreate the target Hudi table from scratch: order_info.
    spark.sql("drop table if exists order_info")
    spark.sql(
      """
        |create table order_info(
        |id int,
        |consignee String,
        |consignee_tel String,
        |final_total_amount decimal(16,2),
        |order_status String,
        |user_id int,
        |delivery_address String,
        |order_comment String,
        |out_trade_no String,
        |trade_body String,
        |create_time String,
        |operate_time String,
        |expire_time String,
        |tracking_no String,
        |parent_order_id int,
        |img_url String,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2)
        |)using hudi
        |tblproperties(
        |type="cow",
        |primaryKey="id",
        |preCombineField="operate_time",
        |hoodie.datasource.hive_sync.mode="hms"
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)
    // NOTE: fixed property key — was misspelled "hive_aync", which Hudi would
    // ignore, so Hive metastore sync mode was never actually set to "hms".

    // JDBC connection properties for the source MySQL instance.
    // NOTE(review): credentials are hard-coded; consider externalizing to
    // configuration or environment variables.
    val mysql_jdbc = new Properties()
    mysql_jdbc.setProperty("user", "root")
    mysql_jdbc.setProperty("password", "123456")
    mysql_jdbc.setProperty("driver", "com.mysql.jdbc.Driver")

    val order_info_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi.db/order_info"

    // Read the source table from MySQL, add the partition column, normalize the
    // timestamp columns to "yyyy-MM-dd HH:mm:ss" strings, and append into Hudi.
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "order_info", mysql_jdbc)
      .withColumn("etl_date", lit(etlDate))
      .withColumn("create_time", date_format(col("create_time"), "yyyy-MM-dd HH:mm:ss"))
      .withColumn("operate_time", date_format(col("operate_time"), "yyyy-MM-dd HH:mm:ss"))
      .withColumn("expire_time", date_format(col("expire_time"), "yyyy-MM-dd HH:mm:ss"))
      .write.mode("append")
      .format("hudi")
      .options(getQuickstartWriteConfigs)
      // Deduplication field: rows with the same key keep the latest operate_time.
      .option(PRECOMBINE_FIELD.key(), "operate_time")
      .option(RECORDKEY_FIELD.key(), "id")
      .option(PARTITIONPATH_FIELD.key(), "etl_date")
      .option("hoodie.table.name", "order_info")
      .save(order_info_path)

    spark.close()
  }

}
