package com.xl.competition.modul_b.task1

import org.apache.spark.sql.SparkSession

import java.sql.Timestamp
import java.util.Properties

/**
 * @author: xl
 * @createTime: 2023/11/15 13:10:33
 * @program: com.xl.competition
 * @description: Incremental load of MySQL order_info into the Hive ODS layer.
 */
object LoadOrderInfoToOds {

  /**
   * Incrementally loads new or updated rows from the MySQL `order_info` table
   * into the Hive ODS table `ods.order_info`, writing them into an `etl_date`
   * partition. Rows are considered new when their `create_time` or
   * `operate_time` is later than the latest timestamp already present in ODS.
   *
   * @param args optional; args(0) overrides the target etl_date partition
   *             value (defaults to "20231117", preserving prior behavior)
   */
  def main(args: Array[String]): Unit = {
    // Allow the partition date to be supplied at launch; keep the old
    // hard-coded value as the default so existing invocations still work.
    val etlDate: String = if (args.nonEmpty) args(0) else "20231117"

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      // Legacy Parquet layout so Hive can read what Spark writes.
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    // NOTE(review): credentials are hard-coded; move them to configuration
    // or a secret store before using this outside a competition sandbox.
    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "Abc123..")

    spark.read
      .jdbc("jdbc:mysql://node3:3306/shtd_store", "order_info", prop)
      // createOrReplaceTempView is idempotent within a session, unlike
      // createTempView which throws if the view already exists.
      .createOrReplaceTempView("temp_order_info")

    // High-water mark: latest create_time/operate_time already loaded.
    // greatest() skips NULL arguments, and coalesce(..., 0) guards the
    // first run against an empty ods.order_info — the original
    // if(max > max, ...) returned NULL there and getLong(0) threw an NPE.
    val timestamp: Long = spark.sql(
      """
        |select coalesce(greatest(max(unix_timestamp(create_time)),
        |                         max(unix_timestamp(operate_time))), 0)
        |from ods.order_info
        |""".stripMargin)
      .first()
      .getLong(0)

    // Append only rows strictly newer than the high-water mark.
    spark.sql(
      s"""
        |insert into ods.order_info partition (etl_date = '$etlDate')
        |select * from temp_order_info
        |where unix_timestamp(create_time) > $timestamp or unix_timestamp(operate_time) > $timestamp
        |""".stripMargin)

    spark.stop()
  }
}
