package org.example

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

object hive1 {

  /**
   * ETL pipeline: ingests the MySQL table `customer_inf` into the ODS Hive
   * table `ods.customer_inf` (one full load plus two incremental batches),
   * then builds a deduplicated dimension table `dwd.dim_customer_inf`
   * keyed on `customer_id`, keeping the most recently modified record per key.
   *
   * Requires a Hive metastore with databases `ods` and `dwd`, and a MySQL
   * instance reachable at bigdata1:3306.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("test1")
    val spark = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    // Expose the JDBC source as a temp view.
    // NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x
    // class; Connector/J 8.x uses `com.mysql.cj.jdbc.Driver` — confirm which
    // jar is on the cluster classpath.
    spark.read.format("jdbc")
      .option("url", "jdbc:mysql://bigdata1:3306/ds_db01")
      .option("user", "root")
      .option("password", "123456")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("dbtable", "customer_inf")
      .load()
      .createOrReplaceTempView("t1")

    // Full load: first 10000 rows into partition etl_date=20240410.
    spark.sql("select * from t1 limit 10000")
      .withColumn("etl_date", lit("20240410"))
      .write.partitionBy("etl_date").mode("overwrite").saveAsTable("ods.customer_inf")

    // Incremental load: only rows newer than anything already in ODS.
    spark.sql("select * from t1 where t1.modified_time > (select max(modified_time) from ods.customer_inf)")
      .withColumn("etl_date", lit("20240411"))
      .write.partitionBy("etl_date").mode("append").saveAsTable("ods.customer_inf")

    // Second batch: the 10000 most recently modified source rows.
    spark.sql("select * from t1 order by modified_time desc limit 10000")
      .withColumn("etl_date", lit("20240411"))
      .write.partitionBy("etl_date").mode("append").saveAsTable("ods.customer_inf")

    // Initial DWD build: 20240411 data plus the four audit columns.
    // Note: etl_date is stored as a string; comparing it to the bare numeric
    // literal 20240411 relies on Spark's implicit cast (works for this value).
    spark.table("ods.customer_inf")
      .limit(10000)
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn("dwd_insert_time", date_trunc("second", current_timestamp()))
      .withColumn("dwd_modify_time", date_trunc("second", current_timestamp()))
      .where("etl_date=20240411")
      .write
      .partitionBy("etl_date")
      .mode("overwrite")
      .saveAsTable("dwd.dim_customer_inf")

    // Merge ODS with the existing dimension table.
    // BUG FIX: `union` matches columns by POSITION, but saveAsTable with
    // partitionBy moves the partition column (etl_date) to the end of the
    // stored table's schema, while the freshly-built frame has etl_date
    // before the dwd_* audit columns. Positional union would therefore
    // misalign columns; unionByName matches by name and is order-safe.
    spark.table("ods.customer_inf")
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn("dwd_insert_time", date_trunc("second", current_timestamp()))
      .withColumn("dwd_modify_time", date_trunc("second", current_timestamp()))
      .unionByName(spark.table("dwd.dim_customer_inf"))
      .createOrReplaceTempView("t2")

    // w1 ranks each customer's records newest-first; w2 spans all records of
    // a customer so min/max give the earliest/latest modification times.
    val w1 = Window.partitionBy("customer_id").orderBy(desc("modified_time"))
    val w2 = Window.partitionBy("customer_id")

    // BUG FIX: the original assigned `dwd_insert_time` twice, so the max()
    // silently overwrote the min() and the insert time was lost; the second
    // assignment must target `dwd_modify_time`.
    val merged = spark.table("t2")
      .withColumn("px", row_number().over(w1))
      .withColumn("dwd_insert_time", min("modified_time").over(w2))
      .withColumn("dwd_modify_time", max("modified_time").over(w2))
      .where("px=1")
      .drop("px")
      .cache()
    // Force full materialization NOW: the plan behind `merged` reads
    // dwd.dim_customer_inf (via the union above), and dropping that managed
    // table below would otherwise delete the files the lazy plan still needs.
    merged.count()

    // Drop before the overwrite so Spark does not refuse to overwrite a
    // table that is also being read from; `merged` is already cached.
    spark.sql("drop table dwd.dim_customer_inf")
    merged.write
      .mode("overwrite")
      .partitionBy("etl_date")
      .saveAsTable("dwd.dim_customer_inf")
    merged.unpersist()

    spark.stop()
  }
}
