package com.tour.dwd

import com.shujia.common.util.Md5
import com.shujia.common.{Constants, SparkTool}
import org.apache.spark.sql._
import org.apache.spark.sql.expressions.UserDefinedFunction

object MergeLocationApp extends SparkTool {

  /**
    * Merges the four ODS location-source tables (oidd, ddr, dpi, wcdr) for a
    * single day into one DWD "merge location" dataset, masking the phone
    * number (mdn) with an MD5 hash before persisting it.
    *
    * @param spark Spark entry point
    */
  override def run(spark: SparkSession): Unit = {
    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Load one day's partition of a source table from the ODS database.
    def loadOdsTable(tableName: String): DataFrame =
      spark.sql(s"select * from ${Constants.ODS_DATABASE_NAME}.$tableName where day_id=$day_id")

    // The four location sources share the same column layout, so they can be
    // stacked with union (positional) into one merged dataset.
    val sourceTables: Seq[DataFrame] = Seq(
      Constants.OIDD_TABLE_NAME,
      Constants.DDR_TABLE_NAME,
      Constants.DPI_TABLE_NAME,
      Constants.WCDR_TABLE_NAME
    ).map(loadOdsTable)

    val merged: Dataset[Row] = sourceTables.reduce(_ union _)

    // UDF that hashes the phone number with MD5 so the raw mdn never reaches DWD.
    val md5Udf: UserDefinedFunction = udf((str: String) => Md5.md5(str))

    // Mask the phone number, keeping the shared location columns as-is.
    val maskedLocation: DataFrame = merged.select(
      md5Udf($"mdn") as "mdn",
      $"start_time",
      $"county_id",
      $"longi",
      $"lati",
      $"bsid",
      $"grid_id",
      $"biz_type",
      $"event_type",
      $"data_source"
    )

    // Write directly to the table's storage location as tab-separated text.
    // NOTE: the merge-location table must already exist in Hive.
    maskedLocation
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"${Constants.MERGELOCATION_PATH_NAME}day_id=$day_id")

    // Register the new day partition with Hive so the data becomes queryable.
    spark.sql(s"alter table ${Constants.DWD_DATABASE_NAME}.${Constants.MERGELOCATION_TABLE_NAME}  add if not exists partition(day_id='$day_id')")

    /**
      * Example submission:
      * spark-submit --master yarn-client --class com.tour.dwd.MergeLocationApp --num-executor
      * s 2 --executor-memory 4G --executor-cores 2 --jars common-1.0-SNAPSHOT.jar dwd-1.0-SNAPSHOT.jar 20180503
      */
  }

}
