package com.shujia.dim

import com.shujia.common.SparkTool
import org.apache.spark.sql.{Dataset, Row, SaveMode, SparkSession}

/**
 * DIM-layer loader for the masked user-profile table.
 *
 * Reads one day of raw user-profile rows from `ods.ods_usertag_d`, masks the
 * directly-identifying columns (phone number and id number) with upper-cased
 * MD5 hashes, writes the result as a CSV partition under the table's HDFS
 * directory, and registers that partition in the Hive metastore.
 *
 * NOTE(review): `dayId` is inherited from `SparkTool` — presumably the batch
 * date passed in by the scheduler; confirm against the base class.
 */
object DimUsertagMskDay extends SparkTool {
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // One day of raw (unmasked) profile rows from the ODS layer.
    val sourceData: Dataset[Row] = spark
      .table("ods.ods_usertag_d")
      .where($"day_id" === dayId)

    // Mask the sensitive identifiers; all other attributes pass through
    // unchanged. MD5 is upper-cased so the hashes join consistently with
    // other masked tables in the DIM layer.
    val maskedData = sourceData.select(
      upper(md5($"mdn")).as("mdn"),
      $"name",
      $"gender",
      $"age",
      upper(md5($"id_number")).as("id_number"),
      $"number_attr",
      $"trmnl_brand",
      $"trmnl_price",
      $"packg",
      $"conpot",
      $"resi_grid_id",
      $"resi_county_id"
    )

    // Overwrite the partition directory backing dim.dim_usertag_msk_d for
    // this day (idempotent: reruns replace the same partition's files).
    maskedData.write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dim/dim_usertag_msk_d/day_id=$dayId")

    // Make the freshly written directory visible as a table partition.
    // `if not exists` keeps reruns from failing once it is registered.
    spark.sql(
      s"""
         |alter table dim.dim_usertag_msk_d add if not exists partition(day_id=$dayId)
         |""".stripMargin)
  }
}
