package com.shujia.dim

import com.shujia.common.SparkTool
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.slf4j.Logger

/**
 * Daily DIM job: builds the masked user-tag dimension for one partition day.
 *
 * Reads `ods.ods_usertag_d` for the current `day_id` (inherited from
 * [[SparkTool]] — presumably the run date; confirm against SparkTool),
 * MD5-hashes the `mdn` column, writes the result as CSV under the
 * partition directory, then registers the partition on
 * `dim.dim_usertag_msk_d`.
 */
object DimUsertagMskDay extends SparkTool {
  override def run(spark: SparkSession): Unit = {
    import org.apache.spark.sql.functions._

    // Columns copied through unchanged; only `mdn` is masked below.
    val passthrough = Seq(
      "name", "gender", "age", "id_number", "number_attr",
      "trmnl_brand", "trmnl_price", "packg", "conpot",
      "resi_grid_id", "resi_county_id"
    )

    // Masked phone number first, then the pass-through columns in order.
    val projection = (md5(col("mdn")) as "mdn") +: passthrough.map(col)

    spark
      .table("ods.ods_usertag_d")
      .where(col("day_id") === day_id)
      .select(projection: _*)
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite) // idempotent: rerun replaces the day's output
      .save(s"/daas/motl/dim/dim_usertag_msk_d/day_id=$day_id")

    // Register the freshly written directory as a partition of the DIM table.
    spark.sql(
      s"""
         |alter table dim.dim_usertag_msk_d add if not exists partition(day_id=$day_id)
         |""".stripMargin)

  }
}
