package com.shujia.dws

import com.shujia.common.SparkTool
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DwsScenicTouristMskDay extends SparkTool {

  // Minimum stay inside a scenic area (seconds) for a user to count as a visitor.
  private val MinStaySeconds: Long = 30 * 60

  /**
   * Daily scenic-area tourist extraction.
   *
   * Explodes the scenic-area grid configuration to one row per grid id, joins
   * it with the day's merged location data, keeps users who stayed inside a
   * scenic area longer than [[MinStaySeconds]], excludes users whose registered
   * residence grid lies inside that same scenic area, then writes the result
   * as a CSV partition of dws.dws_scenic_tourist_msk_d and registers the
   * partition in the metastore.
   *
   * @param spark active session; `day_id` is inherited from SparkTool.
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // `grids` is a '|'-separated list of grid ids; parse it once and reuse
    // the expression for both the explode and the residence filter below.
    val gridArray = split($"grids", "\\|")

    // Load the day's location-merge data, the user-profile dimension and the
    // scenic-area grid configuration.
    val mergeDF: DataFrame = spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"day_id" === day_id)
    val userTagDF: DataFrame = spark.table("dim.dim_usertag_msk_d").where($"day_id" === day_id)
      .select($"mdn", $"resi_county_id", $"resi_grid_id")
    val scenicDF: DataFrame = spark.table("dim.dim_scenic_grid")

    scenicDF
      // One row per (scenic area, grid id) so location rows match on grid_id.
      .select($"scenic_id", $"scenic_name", $"grids", explode(gridArray) as "grid_id")
      .join(mergeDF, Seq("grid_id"), "inner")
      // Collapse to one row per (user, scenic area): earliest arrival and
      // latest departure across all grids of that area.
      .groupBy($"mdn", $"scenic_id", $"scenic_name", $"grids")
      .agg(min($"start_date") as "min_start_date", max($"end_date") as "max_end_date")
      .withColumn("d_stay_time",
        unix_timestamp($"max_end_date", "yyyyMMddHHmmss") - unix_timestamp($"min_start_date", "yyyyMMddHHmmss"))
      .where($"d_stay_time" > MinStaySeconds)
      // FIX: the original used a "left" join here, but the array_contains
      // filter below is not null-safe — for rows with no profile match,
      // resi_grid_id is NULL, array_contains(..., NULL) is NULL, and
      // `where(!NULL)` drops the row. The left join was therefore silently
      // an inner join; an inner join states that actual behavior explicitly.
      // NOTE(review): if profile-less users SHOULD be kept as tourists,
      // restore "left" and prepend `$"resi_grid_id".isNull ||` to the filter.
      .join(userTagDF, Seq("mdn"), "inner")
      // Keep only users whose home grid is NOT inside the scenic area.
      .where(!array_contains(gridArray, $"resi_grid_id"))
      .select(
        $"mdn"
        , $"resi_county_id" as "source_county_id"
        , $"scenic_id" as "d_scenic_id"
        , $"scenic_name" as "d_scenic_name"
        , $"min_start_date" as "d_arrive_time"
        , $"d_stay_time"
      )
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_scenic_tourist_msk_d/day_id=$day_id")

    // Register the freshly written partition so the table can be queried.
    spark.sql(
      s"""
         |alter table dws.dws_scenic_tourist_msk_d add if not exists partition(day_id=$day_id)
         |""".stripMargin)
  }
}
