package com.shujia.dwi

import com.shujia.util.MD5
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object MergeLocationApp extends Logging {

  /**
   * Entry point for the daily location-merge job.
   *
   * Reads the four ODS source tables (dpi / wcdr / ddr / oidd) for one day
   * partition, masks the phone number (mdn) with MD5, and writes the merged
   * result to the DWI layer as a tab-separated CSV partition, then registers
   * the partition in Hive.
   *
   * @param args args(0) = day_id, the target day partition (digits only, e.g. 20240101)
   */
  def main(args: Array[String]): Unit = {

    // BUG FIX: original code compared the Array itself to 0 (`args == 0`),
    // which is always false, so the guard never fired and args(0) could
    // throw ArrayIndexOutOfBoundsException.
    if (args.isEmpty) {
      log.error("输入参数为空")
      return
    }

    val day_id: String = args(0)

    // day_id is interpolated into SQL and an HDFS path below; reject anything
    // that is not a plain digit string to fail fast on malformed input
    // (and to prevent SQL/path injection from a bad argument).
    if (!day_id.matches("\\d+")) {
      log.error(s"非法的时间分区参数：$day_id")
      return
    }

    log.info(s"当前时间分区为：$day_id")

    val spark: SparkSession = SparkSession
      .builder()
      .appName("MergeLocationApp")
      .config("spark.sql.shuffle.partitions", "8")
      .enableHiveSupport() // use the Hive metastore for table lookups
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    /**
     * Register the masking UDF with Spark SQL so it can be used
     * in `expr(...)` below.
     */
    spark.udf.register("ods_md5", (str: String) => MD5.md5(str))

    /**
     * In a real production scenario this part is much more involved:
     * 1. mask sensitive data
     * 2. extract the location from each source format
     * 3. calibrate the location
     * 4. drop dirty records
     */

    // 1. Read and union the four ODS source tables for the target day.
    //    All four tables are assumed to share the same schema (see comment
    //    below) — UNION ALL matches columns by position.
    val df: DataFrame = spark.sql(
      s"""
         |
         |select * from ods.ods_dpi where day_id=$day_id
         |union all
         |select * from ods.ods_wcdr where day_id=$day_id
         |union all
         |select * from ods.ods_ddr where day_id=$day_id
         |union all
         |select * from ods.ods_oidd where day_id=$day_id
         |
         |""".stripMargin)

    /**
     * Expected schema of the union:
     *   mdn         string  '手机号码'  (phone number)
     *   start_time  string  '业务时间'  (event time)
     *   county_id   string  '区县编码'  (county code)
     *   longi       string  '经度'     (longitude)
     *   lati        string  '纬度'     (latitude)
     *   bsid        string  '基站标识'  (base station id)
     *   grid_id     string  '网格号'    (grid id)
     *   biz_type    string  '业务类型'  (business type)
     *   event_type  string  '事件类型'  (event type)
     *   data_source string  '数据源'    (data source)
     */

    // 2. Mask the phone number; pass every other column through unchanged.
    val mergeDF: DataFrame = df.select(
      expr("ods_md5(mdn)") as "mdn",
      $"start_time",
      $"county_id",
      $"longi",
      $"lati",
      $"bsid",
      $"grid_id",
      $"biz_type",
      $"event_type",
      $"data_source"
    )

    // 3. Save the merged data as tab-separated CSV directly into the
    //    partition directory of the DWI table. Overwrite makes reruns
    //    of the same day idempotent.
    mergeDF
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=$day_id")

    // 4. Register the new partition with the Hive metastore so the data
    //    becomes queryable (idempotent via IF NOT EXISTS).
    spark.sql(s"alter table dwi.dwi_res_regn_mergelocation_msk_d  add if not exists partition(day_id='$day_id')")

    // Release cluster resources cleanly.
    spark.stop()
  }
}
