package com.shujia.dws

import com.shujia.common.SparkTool
import com.shujia.common.utils.Geography
import com.shujia.common.utils.poly.Polygon
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}

object DwsScenicTouristMskDay extends SparkTool {
  /**
   * Daily DWS job: find tourists who stayed in a scenic area for more than
   * 30 minutes on `dayId`, excluding local residents (users whose home grid
   * lies inside that scenic area), then write the result partition as CSV
   * and register the partition on the Hive table.
   *
   * Output columns: mdn, source_county_id, d_scenic_id, d_scenic_name,
   * d_arrive_time, d_stay_time.
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Load the merged location table, the user-profile dimension and the
    // scenic-grid dimension, each restricted to the current day where partitioned.
    val mergeDF: DataFrame = spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"day_id" === dayId)

    val userTagDF: DataFrame = spark.table("dim.dim_usertag_msk_d")
      .where($"day_id" === dayId)
      // Keep only the columns needed downstream.
      .select($"mdn", $"resi_grid_id", $"resi_county_id")

    val scenicDF: DataFrame = spark.table("dim.dim_scenic_grid")
      // "grids" is a '|'-separated list of grid ids; explode it to one row per
      // grid. '|' must be escaped because split() interprets its pattern as a regex.
      .select($"scenic_id", $"scenic_name", $"grids", explode(split($"grids", "\\|")) as "grid_id")

    val scenicTouristDF: DataFrame = mergeDF
      // Inner join keeps only location records that fall inside some scenic grid.
      .join(scenicDF, List("grid_id"), "inner")
      // Stay duration per (user, scenic area): earliest arrival to latest
      // departure across all of the user's records in that area.
      .withColumn("min_start_date", min($"start_date") over Window.partitionBy($"mdn", $"scenic_id"))
      .withColumn("max_end_date", max($"end_date") over Window.partitionBy($"mdn", $"scenic_id"))
      .withColumn("d_stay_time", unix_timestamp($"max_end_date", "yyyyMMddHHmmss") - unix_timestamp($"min_start_date", "yyyyMMddHHmmss"))
      // Keep only visitors who stayed longer than 30 minutes (d_stay_time is in seconds).
      .where($"d_stay_time" > 30 * 60)
      .select($"mdn", $"scenic_id", $"scenic_name", $"grids", $"min_start_date" as "d_arrive_time", $"d_stay_time")
      .distinct()
      // Attach the user's home grid / county. Left join so users without a
      // profile row are still counted as tourists.
      .join(userTagDF, List("mdn"), "left")
      // Exclude residents: drop users whose home grid is one of the scenic grids.
      // BUG FIX 1: the delimiter must be escaped as "\\|" (consistent with the
      // explode above) — an unescaped '|' is a regex alternation that splits the
      // string into single characters, so the resident check never matched and
      // residents were counted as tourists.
      // BUG FIX 2: when the left join found no profile, resi_grid_id is null and
      // the negated predicate evaluates to null, silently dropping the row;
      // keep such rows explicitly with isNull.
      .where($"resi_grid_id".isNull || !array_contains(split($"grids", "\\|"), $"resi_grid_id"))
      .select(
        $"mdn"
        , $"resi_county_id" as "source_county_id"
        , $"scenic_id" as "d_scenic_id"
        , $"scenic_name" as "d_scenic_name"
        , $"d_arrive_time"
        , $"d_stay_time"
      )

    // Write the result into the day's partition directory, replacing any
    // previous output for the same day.
    scenicTouristDF
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_scenic_tourist_msk_d/day_id=$dayId")

    // Register the new partition so the table can see the written files.
    spark.sql(
      s"""
         |alter table dws.dws_scenic_tourist_msk_d add if not exists partition(day_id=$dayId)
         |""".stripMargin)
  }
}
