package com.shujia.dw

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DwMoageLocationMskDApp {

  /**
   * Entry point: unions one day of location records from the four ODS source
   * tables (ddr, dpi, oidd, wcdr) and writes them out as tab-separated CSV
   * under a day_id-partitioned HDFS path.
   *
   * @param args optional; args(0), when present, is the day to process
   *             (yyyyMMdd). Defaults to "20180503" so existing invocations
   *             without arguments keep their original behavior.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwMoageLocationMskDApp")
      .config("spark.sql.shuffle.partitions", 20)
      .enableHiveSupport() // needed so Spark can read table metadata from the Hive metastore
      .getOrCreate()

    // Generalized: take the day from the command line, falling back to the
    // previously hard-coded value for backward compatibility.
    val day_id: String = args.headOption.getOrElse("20180503")

    // UNION ALL is associative, so the original outer `select * from (...)`
    // wrapper and per-branch parentheses were redundant — and an unaliased
    // subquery in FROM is rejected by some Hive parser versions. The flat
    // form below is semantically identical.
    val unionDF: DataFrame = spark.sql(
      s"""
         |select * from ods.ods_ddr  where day_id = '$day_id'
         |union all
         |select * from ods.ods_dpi  where day_id = '$day_id'
         |union all
         |select * from ods.ods_oidd where day_id = '$day_id'
         |union all
         |select * from ods.ods_wcdr where day_id = '$day_id'
      """.stripMargin)

    unionDF
      .write
      .mode(SaveMode.Overwrite) // re-running the same day replaces the previous output
      .option("sep", "\t")
      .csv(s"/daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=$day_id")

    // Release the session/cluster resources so the driver JVM can exit cleanly.
    spark.stop()
  }
}
