package ods_industry_2024.ods_02.extract

import org.apache.hudi.DataSourceWriteOptions.{PARTITIONPATH_FIELD, PRECOMBINE_FIELD, RECORDKEY_FIELD}
import org.apache.hudi.QuickstartUtils.getQuickstartWriteConfigs
import org.apache.spark.sql.functions.{col, lit, when}
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

// NOTE(review): the object name "table_perparation" is misspelled, but it is
// kept as-is because spark-submit invocations / schedulers may reference it.
object table_perparation {

  /**
   * Rebuilds the ODS-layer Hudi tables and bulk-loads a snapshot of each
   * source table from MySQL into the matching Hudi storage path.
   *
   * Steps:
   *   1. Drop and recreate six copy-on-write Hudi tables in `ods_ds_hudi_02`,
   *      all keyed by `id` and partitioned by `etl_date`.
   *   2. Read each table over JDBC, tag every row with an `etl_date` partition
   *      value of yesterday (yyyyMMdd), and append-write it through the Hudi
   *      datasource.
   *
   * Bug fixed vs. the previous version: the table property was misspelled
   * `hoodie.datasource.hive_aync.mode` in every DDL, so the Hive sync mode
   * was never actually set; the correct key is
   * `hoodie.datasource.hive_sync.mode`.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据抽取的表格准备")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use ods_ds_hudi_02")

    /**
     * Drops (if present) and recreates one COW Hudi table partitioned by
     * `etl_date`. All six ODS tables share the same primary key ("id") and
     * table properties; only the column list and pre-combine field differ.
     *
     * @param name            target table name
     * @param columns         comma/newline separated column DDL fragment
     * @param preCombineField column Hudi uses to pick the winner on key clash
     */
    def createHudiTable(name: String, columns: String, preCombineField: String): Unit = {
      spark.sql(s"drop table if exists $name")
      spark.sql(
        s"""
           |create table if not exists $name(
           |$columns
           |) using hudi
           |tblproperties(
           |type="cow",
           |primaryKey="id",
           |preCombineField="$preCombineField",
           |hoodie.datasource.hive_sync.mode="hms"
           |)
           |partitioned by(etl_date string)
           |""".stripMargin)
    }

    createHudiTable("user_info",
      """id int,
        |login_name string,
        |nick_name string,
        |passwd string,
        |name string,
        |phone_num string,
        |email string,
        |head_img string,
        |user_level string,
        |birthday timestamp,
        |gender string,
        |create_time timestamp,
        |operate_time timestamp""".stripMargin,
      preCombineField = "operate_time")

    createHudiTable("sku_info",
      """id int,
        |spu_id int,
        |price decimal(10,0),
        |sku_name string,
        |sku_desc string,
        |weight decimal(10,2),
        |tm_id int,
        |category3_id int,
        |sku_default_img string,
        |create_time timestamp""".stripMargin,
      preCombineField = "create_time")

    createHudiTable("base_province",
      """id int,
        |name string,
        |region_id string,
        |area_code string,
        |iso_code string,
        |create_time timestamp""".stripMargin,
      preCombineField = "create_time")

    createHudiTable("base_region",
      """id string,
        |region_name string,
        |create_time timestamp""".stripMargin,
      preCombineField = "create_time")

    createHudiTable("order_info",
      """id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2)""".stripMargin,
      preCombineField = "operate_time")

    createHudiTable("order_detail",
      """id int,
        |order_id int,
        |sku_id int,
        |sku_name string,
        |img_url string,
        |order_price decimal(10,2),
        |sku_num string,
        |create_time timestamp,
        |source_type string,
        |source_id int""".stripMargin,
      preCombineField = "create_time")

    spark.sql("show tables").show

    // NOTE(review): plaintext credentials and hard-coded hosts belong in
    // configuration (e.g. spark-submit --conf / a properties file), not in
    // source. Kept here to preserve the job's existing behavior.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // current_time: full timestamp of "now" (used to backfill create_time on
    // the base_* dimension tables); yesterday: yyyyMMdd partition value.
    val day: Calendar = Calendar.getInstance()
    val current_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
    day.add(Calendar.DATE, -1)
    val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)

    /**
     * Reads one MySQL table over JDBC and append-writes it into the matching
     * Hudi path, partitioned by `etl_date` = yesterday.
     *
     * Replaces the three copy-pasted `to_hive`/`to_hive02`/`to_hive03`
     * variants: the per-table column patch (null-operate_time backfill,
     * create_time stamping) is injected via `transform`, defaulting to the
     * identity function (no patch).
     *
     * @param tableName       source and target table name
     * @param primaryKey      Hudi record key column
     * @param preCombineField Hudi pre-combine column
     * @param transform       optional column patch applied before the write
     */
    def loadTable(tableName: String,
                  primaryKey: String,
                  preCombineField: String,
                  transform: DataFrame => DataFrame = identity): Unit = {
      val hdfsPath = s"hdfs://192.168.40.110:9000/user/hive/warehouse/ods_ds_hudi_02.db/${tableName}"
      val source = spark.read
        .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", tableName, connect)
      transform(source)
        .withColumn("etl_date", lit(yesterday))
        .write.mode("append")
        .format("hudi")
        .options(getQuickstartWriteConfigs)
        .option(RECORDKEY_FIELD.key(), primaryKey)
        .option(PRECOMBINE_FIELD.key(), preCombineField)
        .option(PARTITIONPATH_FIELD.key(), "etl_date")
        .option("hoodie.table.name", tableName)
        .save(hdfsPath)
      println(s"${tableName}准备完成")
    }

    // user_info: null operate_time would break pre-combine, so fall back to
    // create_time (former to_hive02 behavior).
    loadTable("user_info", "id", "operate_time",
      _.withColumn(
        "operate_time",
        when(col("operate_time").isNull, col("create_time")).otherwise(col("operate_time"))))
    loadTable("sku_info", "id", "create_time")
    // base_*: overwrite create_time with the job's start time (former
    // to_hive03 behavior).
    loadTable("base_province", "id", "create_time",
      _.withColumn("create_time", lit(current_time)))
    loadTable("base_region", "id", "create_time",
      _.withColumn("create_time", lit(current_time)))
    loadTable("order_info", "id", "operate_time")
    loadTable("order_detail", "id", "create_time")

    spark.close()
  }

}
