package com.shujia.tour

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object ScenicTouristApp extends Logging {

  /**
   * Daily scenic-area tourist extraction job.
   *
   * Selects phone numbers (mdn) whose stay points for the given day fall
   * inside a scenic-area grid, while their resident grid is NOT inside any
   * scenic area, then aggregates arrival time and total stay duration per
   * (mdn, source city, scenic area). Results are written as TSV to HDFS and
   * the corresponding Hive partition is registered.
   *
   * Usage: args(0) = day partition id in yyyyMMdd form (e.g. "20240101");
   * the month partition (yyyyMM) is derived from its first six characters.
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a non-zero exit code when the required day_id argument
    // is missing or too short to derive the yyyyMM month partition.
    // (Original message said "输出" (output) but this validates *input* args;
    // original `return` exited with status 0, hiding the failure from schedulers.)
    if (args.isEmpty || args(0).length < 6) {
      log.error("输入参数为空")
      sys.exit(1)
    }

    val day_id: String = args(0)
    val month_id: String = day_id.substring(0, 6) // yyyyMM month partition

    log.info(s"当前天时间分区为：$day_id")
    log.info(s"当前月时间分区为：$month_id")

    val spark: SparkSession = SparkSession
      .builder()
      // Fixed: app name previously said "StayPointApp" (copy/paste from another job).
      .appName("ScenicTouristApp")
      // Small daily output; a single shuffle partition keeps the result in one file.
      .config("spark.sql.shuffle.partitions", "1")
      .enableHiveSupport() // use the Hive metastore for table metadata
      .getOrCreate()

    try {

      /**
       * Scenic-area tourist filter:
       *   1. the destination (stay-point grid) lies inside a scenic area
       *   2. the resident grid does NOT lie inside any scenic area
       *
       * t8: per-mdn stay points joined to scenic grids (destination side).
       * t9: per-mdn resident city, restricted to users whose resident grid
       *     is outside every scenic grid (anti-join via LEFT JOIN + IS NULL).
       * Partition values are quoted, matching the ALTER TABLE statement below,
       * so string partition columns compare without implicit casts.
       */
      val resultDF: DataFrame = spark.sql(
        s"""
           |
           |SELECT
           |    t8.mdn,
           |    t9.city_id AS source_city_id,
           |    t8.scenic_id AS d_scenic_id,
           |    t8.scenic_name AS d_scenic_name,
           |    min( t8.grid_first_time ) AS d_arrive_time,
           |    sum( t8.duration ) AS d_stay_time
           |FROM
           |    (
           |    SELECT
           |        /*+ broadcast(t1) */
           |        t2.mdn,
           |        t2.duration,
           |        t2.grid_first_time,
           |        t1.scenic_id,
           |        t1.scenic_name
           |    FROM
           |        dim.dim_scenic_grid AS t1
           |        INNER JOIN ( SELECT mdn, grid_id, duration, grid_first_time FROM dwi.dwi_staypoint_msk_d WHERE day_id = '$day_id' ) AS t2 ON t1.grid = t2.grid_id
           |    ) AS t8
           |
           |    INNER JOIN
           |
           |    (
           |    SELECT
           |        /*+ broadcast(t7) */
           |        t6.mdn,
           |        t7.city_id
           |    FROM
           |        (
           |        SELECT
           |            t5.mdn,
           |            t5.resi_county_id
           |        FROM
           |            (
           |            SELECT
           |                /*+ broadcast(t4) */
           |                t3.mdn,
           |                t3.resi_grid_id,
           |                t3.resi_county_id
           |            FROM
           |                ( SELECT mdn, resi_grid_id, resi_county_id FROM dim.dim_usertag_msk_m WHERE month_id = '$month_id' ) AS t3
           |                LEFT JOIN dim.dim_scenic_grid AS t4 ON t3.resi_grid_id = t4.grid
           |            WHERE
           |                t4.grid IS NULL
           |            ) AS t5
           |        ) AS t6
           |        INNER JOIN dim.dim_admincode AS t7 ON t6.resi_county_id = t7.county_id
           |    ) AS t9 ON t8.mdn = t9.mdn
           |
           |GROUP BY
           |    t8.mdn,
           |    t9.city_id,
           |    t8.scenic_id,
           |    t8.scenic_name
           |
           |""".stripMargin)

      // Persist the result as tab-separated text under the day partition path.
      resultDF
        .write
        .format("csv")
        .option("sep", "\t")
        .mode(SaveMode.Overwrite)
        .save(s"/daas/motl/dal_tour/dal_tour_scenic_tourist_msk_d/day_id=$day_id")

      // Register the new partition so Hive/Spark SQL can see the written files.
      spark.sql(s"alter table  dal_tour.dal_tour_scenic_tourist_msk_d add if not exists partition(day_id='$day_id')")

    } finally {
      // Always release the Spark session and its cluster resources,
      // even when the job fails part-way through.
      spark.stop()
    }

  }
}
