package com.xl.competition.modul_b.task1

import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.{Column, Row, SaveMode, SparkSession}

import java.sql.Timestamp
import java.util.Properties

/**
 * @author: xl
 * @createTime: 2023/11/14 09:19:00
 * @program: com.xl.competition
 */
object LoadUserInfoToOds {
  /**
   * Incrementally loads MySQL `shtd_store.user_info` into Hive `ods.user_info`.
   *
   * Strategy: read the full source table over JDBC, compute the high-water mark
   * (latest create_time / operate_time) already present in ODS, then append only
   * rows newer than that mark into the target partition.
   *
   * @param args optional; args(0) overrides the etl_date partition value
   *             (defaults to "20231117", the previously hard-coded value)
   */
  def main(args: Array[String]): Unit = {
    // Allow the partition date to be supplied on the command line; the original
    // hard-coded value remains the default for backward compatibility.
    val etlDate: String = if (args.nonEmpty) args(0) else "20231117"

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      .getOrCreate()

    // JDBC credentials for the source MySQL instance.
    // NOTE(review): credentials are hard-coded; move to config/env in production.
    val prop = new Properties()
    prop.put("user", "root")
    prop.put("password", "Abc123..")

    // Register the source table as a temporary view for incremental filtering.
    spark.read
      .jdbc("jdbc:mysql://node3:3306/shtd_store", "user_info", prop)
      .createTempView("temp_user_info")

    // High-water mark: the latest create_time / operate_time already in ODS.
    // greatest() replaces the hand-rolled if(a > b, a, b); nvl guards NULL
    // operate_time values, and the outer coalesce guards an empty target table
    // (first load), where max() is NULL and getLong(0) would otherwise NPE.
    val timestamp: Long = spark.sql(
      """
        |select coalesce(
        |         greatest(max(unix_timestamp(create_time)),
        |                  max(nvl(unix_timestamp(operate_time), 0L))),
        |         0L)
        |from ods.user_info
        |""".stripMargin)
      .first()
      .getLong(0)

    println(timestamp)

    // Append only rows strictly newer than the high-water mark.
    spark.sql(
      s"""
         |insert into table ods.user_info partition (etl_date = '$etlDate')
         |select * from temp_user_info
         |where unix_timestamp(create_time) > $timestamp or unix_timestamp(operate_time) > $timestamp
         |""".stripMargin)
    spark.stop()
  }
}
