package ds_industry_2025.ds

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

// NOTE(review): object name keeps the original (misspelled) identifier —
// spark-submit invocations reference it as the main class, so renaming would
// break callers.
object hive_ods_table_perparation {

  /**
   * Prepares the Hive ODS-layer tables for the e-commerce exercise:
   * fully extracts each source table from MySQL and writes it to an
   * identically named Hive table partitioned by `etl_date`.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("hive，电商的ods层的表格准备")
      // nonstrict dynamic partitioning so partitionBy("etl_date") can write
      // without a static partition spec
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("create database  if not exists ods")
    spark.sql("use ods")

    // JDBC connection properties.
    // NOTE(review): credentials are hard-coded; acceptable for this exercise
    // only — move to config/args otherwise. `com.mysql.jdbc.Driver` is the
    // legacy Connector/J 5.x class; if the 8.x connector is on the classpath,
    // use `com.mysql.cj.jdbc.Driver` instead — confirm which jar is deployed.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    // Current timestamp (stamped into create_time where required) and
    // yesterday's date in yyyyMMdd form (the etl_date partition value).
    val day = Calendar.getInstance()
    val current_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
    println("当前的时间为:", current_time)
    println("当前时间数据的数据类型为:", current_time.getClass)
    day.add(Calendar.DATE, -1)
    val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)
    println("昨天的日期为:", yesterday)

    /**
     * Fully extracts one MySQL table into a Hive table of the same name,
     * overwriting it, partitioned by `etl_date`.
     *
     * Bug fix: the partition value is now the computed previous-day date
     * (`yesterday`) instead of the stale hard-coded "20241104" — the original
     * computed `yesterday` but never used it.
     *
     * @param tableName     source table in MySQL / target table in Hive
     * @param addCreateTime when true, also adds a `create_time` timestamp
     *                      column stamped with the job start time
     */
    def mysqlToHiveOds(tableName: String, addCreateTime: Boolean = false): Unit = {
      val source = spark.read
        .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", tableName, connect)
      val stamped =
        if (addCreateTime) source.withColumn("create_time", lit(current_time).cast("timestamp"))
        else source
      stamped
        .withColumn("etl_date", lit(yesterday))
        .write.mode("overwrite")
        .partitionBy("etl_date")
        .saveAsTable(tableName)
    }

    // Run the extraction for each ODS table; base_province/base_region also
    // receive the create_time audit column.
    mysqlToHiveOds("user_info")
    mysqlToHiveOds("sku_info")
    mysqlToHiveOds("base_province", addCreateTime = true)
    mysqlToHiveOds("base_region", addCreateTime = true)
    mysqlToHiveOds("order_info")
    mysqlToHiveOds("order_detail")

    println("数据写入ods层完成")

    spark.close()

  }
}


