package com.shujia.dim

import com.shujia.util.MD5
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object UserTagOdsToDim extends Logging {

  /**
   * Loads the ODS-layer user-profile table (`ods.ods_usertag_m`) into the DIM
   * layer for one month partition, masking sensitive columns (mdn, name,
   * id_number) with MD5 before writing.
   *
   * Submit with, e.g.:
   * {{{
   * spark-submit --class com.shujia.dim.UserTagOdsToDim --master yarn-client \
   *   --jars common-1.0-SNAPSHOT.jar dim-1.0-SNAPSHOT.jar 201805
   * }}}
   *
   * @param args args(0) is the month partition id, e.g. "201805"
   */
  def main(args: Array[String]): Unit = {
    // Guard clause instead of a bare `return` mid-method (Scala anti-pattern).
    if (args.isEmpty) {
      // Original message said "输出参数" (output params) but this checks the
      // *input* arguments — corrected to 输入.
      log.error("输入参数为空")
    } else {
      run(args(0))
    }
  }

  /**
   * Runs the ODS → DIM masking job for the given month partition.
   *
   * @param monthId month partition value, e.g. "201805"
   */
  private def run(monthId: String): Unit = {

    log.info(s"当前月分区为：$monthId")

    val spark: SparkSession = SparkSession
      .builder()
      .appName("UserTagOdsToDim")
      .config("spark.sql.shuffle.partitions", "2")
      .enableHiveSupport() // use the Hive metastore for table resolution
      .getOrCreate()

    try {
      // Implicit conversions ($"col" syntax) and SQL functions (expr).
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Null-safe MD5 UDF: Spark passes NULL column values to the lambda as
      // null, which MD5.md5 would most likely not handle — propagate NULL
      // instead, matching SQL semantics.
      spark.udf.register("ods_md5", (str: String) => Option(str).map(MD5.md5).orNull)

      // 1. Read the source month partition. The value is quoted so the
      //    comparison is string-typed, consistent with the ALTER TABLE below.
      val odsUserTag: DataFrame =
        spark.sql(s"select * from ods.ods_usertag_m where month_id='$monthId'")

      /*
       * Target schema (dim.dim_usertag_msk_m):
       *   mdn            MD5 of phone number (uppercase MD5 per table comment)
       *   name           name (masked)
       *   gender         1 = male, 2 = female
       *   age            age
       *   id_number      id-card number (masked)
       *   number_attr    phone-number home location
       *   trmnl_brand    handset brand
       *   trmnl_price    handset price
       *   packg          tariff plan
       *   conpot         consumption potential
       *   resi_grid_id   resident grid id
       *   resi_county_id resident county id
       */

      // 2. Mask the personally identifiable columns; pass the rest through.
      val dimUserTag: DataFrame = odsUserTag.select(
        expr("ods_md5(mdn)") as "mdn",
        expr("ods_md5(name)") as "name",
        $"gender",
        $"age",
        expr("ods_md5(id_number)") as "id_number",
        $"number_attr",
        $"trmnl_brand",
        $"trmnl_price",
        $"packg",
        $"conpot",
        $"resi_grid_id",
        $"resi_county_id"
      )

      // 3. Write tab-separated files directly into the partition directory of
      //    the DIM table (the table is assumed to be an external text/CSV
      //    table over this path — TODO confirm against its DDL).
      dimUserTag
        .write
        .format("csv")
        .mode(SaveMode.Overwrite)
        .option("sep", "\t")
        .save(s"/daas/motl/dim/dim_usertag_msk_m/month_id=$monthId")

      // 4. Register the freshly written partition with the Hive metastore so
      //    it becomes queryable.
      spark.sql(s"alter table dim.dim_usertag_msk_m add if not exists partition(month_id='$monthId')")
    } finally {
      // Always release the driver/YARN resources, even when the job fails.
      spark.stop()
    }
  }
}
