package com.shujiadw

import java.text.SimpleDateFormat

import com.shujia.common.util.MD5
import org.apache.spark.sql._
import java.util.Date

object MergeLocationJob {

  /**
    * Reads the DDR, OIDD, WCDR and DPI records for one day partition,
    * unions them into a single merged-location dataset, masks the phone
    * number with MD5 and writes the result as tab-separated text under
    * /daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=&lt;day_id&gt;.
    *
    * Output columns:
    *   mdn         string  -- phone number (MD5-masked)
    *   start_time  string  -- business time
    *   county_id   string  -- county code
    *   longi       string  -- longitude
    *   lati        string  -- latitude
    *   bsid        string  -- base-station id
    *   grid_id     string  -- grid number
    *   biz_type    string  -- business type
    *   event_type  string  -- event type
    *   data_source string  -- data source
    *
    * Usage:
    *   spark-submit --master yarn-client --jars common-1.0.jar \
    *     --class com.shujiadw.MergeLocationJob dw-1.0.jar 20180503
    */
  def main(args: Array[String]): Unit = {

    if (args.length == 0) {
      println("请指定时间参数") // "please supply the date argument"
      return
    }

    // If the scheduler passed the unresolved template "%y%m%d", fall back
    // to today's date; otherwise take the argument verbatim (yyyyMMdd).
    val day_id: String =
      if ("%y%m%d".equals(args(0))) new SimpleDateFormat("yyyyMMdd").format(new Date)
      else args(0)

    println("当前分区时间：" + day_id)

    val spark: SparkSession = SparkSession
      .builder()
      .appName("MergeLocationJob")
      .enableHiveSupport() // resolve ods.* tables through the Hive metastore
      .getOrCreate()

    import spark.implicits._

    try {
      // Load the requested day partition of each ODS source table.
      val ddr: DataFrame  = spark.table("ods.ods_ddr").where($"day_id" === day_id)
      val wcdr: DataFrame = spark.table("ods.ods_wcdr").where($"day_id" === day_id)
      val dpi: DataFrame  = spark.table("ods.ods_dpi").where($"day_id" === day_id)
      val oidd: DataFrame = spark.table("ods.ods_oidd").where($"day_id" === day_id)

      // NOTE(review): Dataset.union is positional — this assumes all four
      // ODS tables share the same column order; verify against the DDLs.
      val unionDF: Dataset[Row] = ddr.union(wcdr).union(dpi).union(oidd)

      // Register the MD5 masking UDF referenced in selectExpr below.
      spark.udf.register("md5", (str: String) => MD5.md5(str))

      // Mask the phone number (mdn) and project the merged-location schema.
      val maskedDF: DataFrame = unionDF
        .selectExpr("md5(mdn) as mdn", "start_time", "county_id", "longi", "lati", "bsid", "grid_id", "biz_type", "event_type", "data_source")

      maskedDF
        .write
        .mode(SaveMode.Overwrite)
        .format("csv")
        .option("sep", "\t")
        .save(s"/daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=$day_id")
    } finally {
      // Release the session and cluster resources even when the job fails.
      spark.stop()
    }
  }
}
