package com.shujia.dws

import com.shujia.common.utils.SparkMain
import com.shujia.common.utils.poly.Polygon
import com.shujia.dws.DwsCityTouristMskDay.dayId
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DwsScenicTouristMskDay extends SparkMain {

  /**
   * Daily DWS job that computes, per scenic spot, the tourists who visited it
   * on `dayId`.
   *
   * A user qualifies as a tourist of a scenic spot when:
   *   - at least one of their merged location records falls inside one of the
   *     spot's grids (inner join on grid id),
   *   - their stay (latest `end_date` minus earliest `start_date`) exceeds
   *     30 minutes,
   *   - their home grid (from the user-profile dimension) is NOT one of the
   *     spot's grids, i.e. residents are excluded.
   *
   * The result is written as CSV under the DWS layer path for the day and the
   * matching Hive partition is registered.
   *
   * @param spark session supplied by the [[SparkMain]] harness
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // NOTE(review): the original version also built an `isInBoundary` UDF
    // from `Polygon` here, but it was never referenced anywhere in this job;
    // it has been removed as dead code.

    // Scenic-spot configuration dimension: one row per (scenic spot, grid).
    // The '#'-separated `grids` column is exploded so location records can be
    // joined on a single grid id; the raw `grids` string is kept for the
    // resident-exclusion check further down.
    val scenicGridDF: DataFrame = spark.table("dim.dim_scenic_grid")
      .select(
        $"scenic_id"
        , $"scenic_name"
        , $"grids"
        , explode(split($"grids", "#")) as "grid"
      )

    // Merged location records for the target day.
    val mergeDF: DataFrame = spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"day_id" === dayId)

    // User-profile dimension for the target day: home grid id and home county.
    val userTagDF: DataFrame = spark.table("dim.dim_usertag_msk_d")
      .where($"day_id" === dayId)
      .select(
        $"mdn"
        , $"resi_grid_id"
        , $"resi_county_id"
      )

    /**
     * Associate location records with scenic spots by grid id, then derive
     * per-user stay time and filter down to genuine tourists.
     */
    val scenicTouristDF: DataFrame = mergeDF
      // The inner join doubles as the filter: a record that matches a grid
      // lies inside that scenic spot.
      .join(scenicGridDF, $"grid_id" === $"grid", "inner")
      .select($"mdn", $"start_date", $"end_date", $"scenic_id", $"scenic_name", $"grids")
      // Stay time per (user, scenic spot): the earliest start_date is taken as
      // the arrival time, the latest end_date as the departure time.
      .groupBy($"mdn", $"scenic_id", $"scenic_name", $"grids")
      .agg(min($"start_date") as "min_start_date", max($"end_date") as "max_end_date")
      // `diffTime` comes from the shared SparkMain utilities; presumably it
      // yields seconds — the 30 * 60 threshold and the /60/60 conversion
      // below both assume so. TODO(review): confirm against its definition.
      .withColumn("d_stay_time", diffTime($"max_end_date", $"min_start_date"))
      // Keep only stays longer than 30 minutes.
      .where($"d_stay_time" > 30 * 60)
      // Attach the user's home grid / county from the profile dimension.
      .join(userTagDF, List("mdn"), "inner")
      // Exclude residents: drop users whose home grid is one of this scenic
      // spot's grids.
      .where(!array_contains(split($"grids", "#"), $"resi_grid_id"))
      // Final projection; stay time converted to hours, rounded to 2 decimals.
      .select(
        $"mdn"
        , $"resi_county_id" as "source_county_id"
        , $"scenic_id" as "d_scenic_id"
        , $"scenic_name" as "d_scenic_name"
        , $"min_start_date" as "d_arrive_time"
        , round($"d_stay_time" / 60 / 60, 2) as "d_stay_time"
      )
      .distinct()

    // Write the result as CSV into the day's partition directory, then
    // register the partition with the Hive metastore so it becomes queryable.
    scenicTouristDF
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_scenic_tourist_msk_d/day_id=$dayId")

    spark.sql(
      s"""
         |alter table dws.dws_scenic_tourist_msk_d add if not exists partition(day_id='$dayId')
         |""".stripMargin)

  }
}
