package com.shujia.dim

import com.shujia.utils.SparkTool
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DimUserTagDay extends SparkTool {

  /**
   * Builds the daily masked user-tag dimension table.
   *
   * Reads one day of the ODS user-tag table (`ods.ods_usertag_d`), masks the
   * personally identifiable columns (`mdn`, `name`, `id_number`) with a
   * salted, upper-cased MD5, writes the result as tab-separated CSV into the
   * partition directory on HDFS, and registers the partition in the metastore.
   *
   * NOTE(review): `day_id` is inherited from [[SparkTool]] — presumably the
   * processing date parsed from job arguments; confirm in the trait.
   */
  override def run(spark: SparkSession): Unit = {

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Salted MD5 mask applied to every sensitive column. `lit` is the
    // idiomatic way to build a constant column (the original used
    // expr("'shujia'"), which parses the same literal through the SQL parser).
    def mask(colName: String) = upper(md5(concat(col(colName), lit("shujia"))))

    val usertagDF: DataFrame = spark
      // 1. Read the full user-tag (user profile) ODS table.
      .table("ods.ods_usertag_d")
      // 2. Keep only the partition for the day being processed.
      .where($"day_id" === day_id)
      // 3. Mask PII columns; pass all other attributes through unchanged.
      .select(
        mask("mdn") as "mdn",
        mask("name") as "name",
        $"gender",
        $"age",
        mask("id_number") as "id_number",
        $"number_attr",
        $"trmnl_brand",
        $"trmnl_price",
        $"packg",
        $"conpot",
        $"resi_grid_id",
        $"resi_county_id"
      )

    // Write the day's rows as tab-separated text into the partition path.
    // Overwrite mode makes re-runs of the same day idempotent.
    usertagDF.write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dim/dim_usertag_msk_d/day_id=$day_id")

    // Register the freshly written partition so Hive/Spark SQL can query it.
    // `if not exists` keeps re-runs from failing on an already-added partition.
    spark.sql(
      s"""
         |alter table dim.dim_usertag_msk_d add if not exists partition(day_id='$day_id')
      """.stripMargin)
  }

}
