package com.shujia.dws

import com.shujia.common.{DateUtil, SparkTool}
import com.shujia.dws.DwsSpacetimeCompanionMskDay.day_id
import com.shujia.util.poly.Polygon
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DwsScenicTouristMskDay extends SparkTool {

  /**
   * Daily DWS job that identifies scenic-spot tourists for one day (`day_id`,
   * taken from [[DwsSpacetimeCompanionMskDay.day_id]]).
   *
   * A user (mdn) counts as a tourist of a scenic spot when:
   *   1. their accumulated presence inside the scenic spot's grids exceeds 30 minutes, and
   *   2. their residence grid (from the user-profile table) is NOT one of the scenic spot's grids.
   *
   * Output: one row per (mdn, scenic spot) written as CSV under the day's partition
   * directory, then the matching Hive partition is registered on
   * `dws.dws_scenic_tourist_msk_d`.
   *
   * @param spark the session provided by [[SparkTool]]
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // UDF: time difference between two date strings, in seconds (delegates to DateUtil).
    // NOTE(review): the 30-minute filter below assumes this returns
    // (date_str2 - date_str1) as a non-negative value for valid records —
    // confirm the sign convention in DateUtil.diff_date.
    val diff_date: UserDefinedFunction = udf((date_str1: String, date_str2: String) => {
      DateUtil.diff_date(date_str1, date_str2)
    })

    // Scenic-spot grid configuration: one row per scenic spot, with its grid ids
    // packed into the comma-separated `grids` column.
    val scenicGridsDF: DataFrame = spark.table("dim.dim_scenic_grid")

    // Explode `grids` so each (scenic spot, grid) pair becomes its own row;
    // the original `grids` string is kept for the residence-grid check later.
    val scenicGridDF: DataFrame = scenicGridsDF
      .select($"scenic_id", $"scenic_name", $"grids", explode(split($"grids", ",")) as "grid_id")

    // User profile table — supplies each mdn's residence grid/county.
    val userTagDF: DataFrame = spark.table("dim.dim_usertag_msk_d")

    // Merged location records for the day (~4M rows). The hard-coded mdn is a
    // known bad/test record excluded from processing.
    val mergeLocDF: DataFrame = spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"mdn" =!= "9740853D6AF8DB38FC15E82FDD97DD10" and $"day_id" === day_id)

    /**
     * Scenic-spot tourists:
     * 1. stay time in the scenic spot exceeds 30 minutes
     * 2. destination is inside the scenic spot, residence is not
     */
    mergeLocDF
      // Inner join on grid_id: a location record that matches a scenic-spot grid
      // is, by definition, inside that scenic spot (membership is grid-based; no
      // polygon/boundary test is involved).
      .join(scenicGridDF, "grid_id")
      // Per user and scenic spot: earliest arrival and latest departure across
      // all of that user's records inside the spot's grids.
      .groupBy($"mdn", $"scenic_id", $"scenic_name", $"grids")
      .agg(min($"start_date") as "d_arrive_time", max($"end_date") as "max_end_date")
      // Stay time in seconds; keep only stays longer than 30 minutes.
      .withColumn("d_stay_time", diff_date($"d_arrive_time", $"max_end_date"))
      .where($"d_stay_time" > 30 * 60)
      // Attach the user profile to obtain the residence grid.
      .join(userTagDF, "mdn")
      // Exclude residents: users whose residence grid is one of the spot's grids.
      .where(!array_contains(split($"grids", ","), $"resi_grid_id"))
      .select($"mdn"
        , $"resi_county_id" as "source_county_id"
        , $"scenic_id" as "d_scenic_id"
        , $"scenic_name" as "d_scenic_name"
        , $"d_arrive_time"
        , $"d_stay_time"
      )
      .distinct()
      .write
      .mode(SaveMode.Overwrite)
      .format("csv")
      .option("sep", ",")
      // NOTE(review): path prefix is `/daas/motl/dws/...` while the Hive table
      // lives in the `dws` database — confirm `motl` is the intended layer
      // directory (it matches the table's registered location only if the DDL
      // points there).
      .save(s"/daas/motl/dws/dws_scenic_tourist_msk_d/day_id=$day_id")

    // Register the freshly written partition so Hive/SparkSQL can see it.
    spark.sql(
      s"""
         |alter table dws.dws_scenic_tourist_msk_d add if not exists partition(day_id='$day_id')
         |""".stripMargin)

  }
}
