package com.shujia.dim

import com.shujia.util.MD5
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object UserTagOdsToDIm extends Logging {

  // Expected shape of the month partition argument, e.g. "201805" (yyyyMM).
  // month_id is interpolated into a SQL statement and an HDFS path below,
  // so only pure digits are accepted to rule out injection / path tricks.
  private val MonthIdPattern = "^\\d{6}$".r

  /**
    * Entry point: loads one month of the ODS-layer user tag table into the
    * DIM layer, masking sensitive columns (mdn, name, id_number) with MD5.
    *
    * Usage:
    *   spark-submit --class com.shujia.dim.UserTagOdsToDIm --master yarn-client
    *     --jars common-1.0.jar dim-1.0.jar 201805
    *
    * @param args args(0) = month partition id, e.g. "201805"
    */
  def main(args: Array[String]): Unit = {
    if (args.length == 0) {
      log.error("输入参数为空")
      // Exit non-zero so workflow schedulers see the failure; the original
      // silent `return` reported success on a missing argument.
      sys.exit(1)
    }

    val monthId = args(0)
    if (MonthIdPattern.findFirstIn(monthId).isEmpty) {
      log.error(s"invalid month_id argument '$monthId', expected yyyyMM digits")
      sys.exit(1)
    }

    run(monthId)
  }

  /**
    * Copies ods.ods_usertag_m for the given month into the DIM layer
    * (dim.dim_usertag_msk_m) with PII columns masked, then registers the
    * Hive partition so the new files are queryable.
    *
    * @param monthId validated month partition id (yyyyMM digits)
    */
  private def run(monthId: String): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("UserTagOdsToDIm")
      .config("spark.sql.shuffle.partitions", "4")
      .enableHiveSupport() // use the Hive metastore for table metadata
      .getOrCreate()

    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // 1. Read this month's slice of the ODS user-tag table.
      val usertag: DataFrame =
        spark.sql(s"select * from ods.ods_usertag_m where month_id=$monthId")

      // Register the masking UDF. NOTE(review): the DIM table comment says the
      // mdn column is an UPPER-CASE MD5 ('手机号大写MD5加密' = "phone, uppercase
      // MD5"); MD5.md5's implementation is not visible here — confirm it
      // upper-cases its output.
      spark.udf.register("ods_md5", (str: String) => MD5.md5(str))

      /**
        * Target DIM schema:
        *   mdn            phone number, MD5-masked
        *   name           name, MD5-masked
        *   gender         1 = male, 2 = female
        *   age
        *   id_number      id card number, MD5-masked
        *   number_attr    number home location
        *   trmnl_brand    handset brand
        *   trmnl_price    handset price
        *   packg          tariff plan
        *   conpot         consumption potential
        *   resi_grid_id   residence grid
        *   resi_county_id residence county
        */

      // 2. Mask PII columns; pass the remaining columns through unchanged.
      val dimUserTag: DataFrame = usertag.select(
        expr("ods_md5(mdn)") as "mdn",
        expr("ods_md5(name)") as "name",
        $"gender",
        $"age",
        expr("ods_md5(id_number)") as "id_number",
        $"number_attr",
        $"trmnl_brand",
        $"trmnl_price",
        $"packg",
        $"conpot",
        $"resi_grid_id",
        $"resi_county_id"
      )

      // 3. Write tab-separated files directly under the DIM table's
      //    partition directory (Overwrite makes the job re-runnable).
      dimUserTag
        .write
        .format("csv")
        .mode(SaveMode.Overwrite)
        .option("sep", "\t")
        .save(s"/daas/motl/dim/dim_usertag_msk_m/month_id=$monthId")

      // 4. Register the partition so Hive queries can see the new files.
      spark.sql(s"alter table dim.dim_usertag_msk_m add if not exists partition(month_id='$monthId')")
    } finally {
      // Release the YARN application even if the job fails mid-way;
      // the original never stopped the session.
      spark.stop()
    }
  }
}
