package DianShang_2024.ds_server.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

object clean01 {
  /*
      Task: extract yesterday's partition (produced by task 1) from
      ods.customer_inf, merge it with the data already in the latest partition of
      dwd.dim_customer_inf keyed on customer_id (keep the newest record per
      customer_id by modified_time), and write the result back into the dwd
      partition table, partition field etl_date equal to the ods partition value.
      Add four audit columns: dwd_insert_user / dwd_modify_user are both "user1";
      on first entry into dwd both dwd_insert_time and dwd_modify_time get the
      current operation time; on a merge-update, dwd_insert_time is preserved and
      only dwd_modify_time gets the current operation time.
   */
  def main(args: Array[String]): Unit = {

    // Build the SparkSession with Hive support so Hive tables can be read/written.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗第一题")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd_server")

    // Target partition value, defined once instead of being repeated in every query.
    val etlDate = "20240311"

    /*
          Processing steps:
            1. Read the target ods partition, append the four dwd audit columns,
               and register the result as a temp view.
            2. UNION ALL it with the current dwd partition (explicit column lists
               on both sides — see note below).
            3. Window over customer_id to keep the newest record by modified_time
               while preserving the earliest dwd_insert_time.
     */

    // Current operation time truncated to whole seconds: formatting and
    // re-parsing drops the fractional part so the audit timestamps are stable.
    val opTime = to_timestamp(
      date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"),
      "yyyy-MM-dd HH:mm:ss"
    )

    spark.sql(
      s"""
        |select
        |*
        |from ods_server.customer_inf01
        |where etl_date='$etlDate'
        |""".stripMargin)
      .withColumn("dwd_insert_user", lit("user1"))
      .withColumn("dwd_insert_time", opTime)
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn("dwd_modify_time", opTime)
      .createOrReplaceTempView("temp_table")

    // Merge both sources and rank per customer_id.
    //
    // NOTE: UNION ALL matches columns by POSITION, so both branches list their
    // columns explicitly. `select *` would mis-align them: Hive places the
    // partition column etl_date LAST in dim_customer_inf, whereas temp_table has
    // etl_date (from the ods schema) followed by the four appended audit columns.
    //
    // min(dwd_insert_time) over the customer_id partition preserves the time the
    // record first entered dwd, while the surviving row (row_number = 1, i.e. the
    // newest modified_time) supplies the latest values and dwd_modify_time.
    spark.sql(
      s"""
        |select
        |customer_inf_id,
        |customer_id,
        |customer_name,
        |identity_card_type,
        |identity_card_no,
        |mobile_phone,
        |customer_email,
        |gender,
        |customer_point,
        |register_time,
        |birthday,
        |customer_level,
        |customer_money,
        |modified_time,
        |row_number() over(partition by customer_id order by modified_time desc) as row_number,
        |dwd_insert_user,
        |min(dwd_insert_time) over(partition by customer_id) as dwd_insert_time,
        |dwd_modify_user,
        |dwd_modify_time
        |from(
        |select
        |customer_inf_id,
        |customer_id,
        |customer_name,
        |identity_card_type,
        |identity_card_no,
        |mobile_phone,
        |customer_email,
        |gender,
        |customer_point,
        |register_time,
        |birthday,
        |customer_level,
        |customer_money,
        |modified_time,
        |dwd_insert_user,
        |dwd_insert_time,
        |dwd_modify_user,
        |dwd_modify_time
        |from dwd_server.dim_customer_inf
        |where etl_date='$etlDate'
        |union all
        |select
        |customer_inf_id,
        |customer_id,
        |customer_name,
        |identity_card_type,
        |identity_card_no,
        |mobile_phone,
        |customer_email,
        |gender,
        |customer_point,
        |register_time,
        |birthday,
        |customer_level,
        |customer_money,
        |modified_time,
        |dwd_insert_user,
        |dwd_insert_time,
        |dwd_modify_user,
        |dwd_modify_time
        |from temp_table
        |) as t1
        |""".stripMargin).createOrReplaceTempView("t1")

    // Debug preview of the merged, ranked data.
    spark.sql("select * from t1 limit 10").show

    // Write the merged result back.
    //
    // `insert overwrite` (not `insert into`): the merge above already re-reads
    // every existing row of this partition, so appending would duplicate the
    // whole partition on each run; overwriting makes the job idempotent.
    spark.sql(
      s"""
        |insert overwrite table dwd_server.dim_customer_inf
        |partition(etl_date='$etlDate')
        |select
        |customer_inf_id,
        |customer_id,
        |customer_name,
        |identity_card_type,
        |identity_card_no,
        |mobile_phone,
        |customer_email,
        |gender,
        |customer_point,
        |register_time,
        |birthday,
        |customer_level,
        |customer_money,
        |modified_time,
        |dwd_insert_user,
        |cast(dwd_insert_time as timestamp),
        |dwd_modify_user,
        |cast(dwd_modify_time as timestamp)
        |from t1
        |where row_number=1
        |""".stripMargin)

    // Tear down the Spark session.
    spark.close()
  }

}
