package com.shujia.dws

import com.shujia.common.SparkTool
import com.shujia.common.utils.Geography
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}

object DwsCityTouristMskDay extends SparkTool {

  /**
   * Builds the daily "city tourist" fact table for one day (`dayId`,
   * supplied by [[SparkTool]]).
   *
   * A record qualifies as a city-tourist trip when BOTH hold:
   *   1. the maximum distance from the user's home grid exceeds 100 km;
   *   2. the total stay time in the visited city exceeds 2 hours.
   *
   * Results are written as CSV under the table's partition directory and
   * the partition is then registered on the Hive table.
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // UDF: distance between two grid ids (delegates to Geography; the
    // 100 * 1000 threshold below implies the unit is metres — confirm).
    val gridDistance: UserDefinedFunction =
      udf((from: Long, to: Long) => Geography.calculateLength(from, to))

    // Source 1: merged location records for the target day.
    val locations: DataFrame =
      spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"day_id" === dayId)

    // Source 2: user profile dimension — only the home grid / home county
    // columns are needed downstream.
    val userProfiles: DataFrame = spark
      .table("dim.dim_usertag_msk_d")
      .where($"day_id" === dayId)
      .select($"mdn", $"resi_grid_id", $"resi_county_id")

    // Source 3: administrative-code dimension, used to map a county id to
    // its parent city id.
    val adminCodes: DataFrame =
      spark.table("dim.dim_admincode").select($"city_id", $"county_id")

    // One partition per (user, home grid, visited city): used to find the
    // farthest point of a trip.
    val perTripWindow = Window.partitionBy($"mdn", $"resi_grid_id", $"city_id")
    // One partition per (user, visited city): used to bound the stay interval.
    val perCityWindow = Window.partitionBy($"mdn", $"city_id")

    val cityTourists: DataFrame = locations
      // Attach each location record to the user's home grid / home county.
      .join(userProfiles, List("mdn"), "inner")
      // Resolve the record's county id to a city id.
      .join(adminCodes, List("county_id"), "inner")
      // Per-record distance from home grid to the record's grid.
      .withColumn("distance", gridDistance($"grid_id", $"resi_grid_id"))
      // Farthest distance reached within each trip partition.
      .withColumn("d_max_distance", max($"distance") over perTripWindow)
      // Rule 1: travel distance over 100 km.
      .where($"d_max_distance" > 100 * 1000)
      // Stay interval = [earliest start, latest end] within the city.
      .withColumn("min_start_date", min($"start_date") over perCityWindow)
      .withColumn("max_end_date", max($"end_date") over perCityWindow)
      .withColumn(
        "d_stay_time",
        unix_timestamp($"max_end_date", "yyyyMMddHHmmss") -
          unix_timestamp($"min_start_date", "yyyyMMddHHmmss"))
      // Rule 2: stay time over 2 hours (seconds).
      .where($"d_stay_time" > 2 * 60 * 60)
      // Final projection; distinct collapses the per-record window duplicates.
      .select(
        $"mdn",
        $"resi_county_id" as "source_county_id",
        $"city_id" as "d_city_id",
        $"d_stay_time",
        $"d_max_distance")
      .distinct()

    // Write the result files into the day's partition directory.
    cityTourists.write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_city_tourist_msk_d/day_id=$dayId")

    // Register the freshly written partition on the Hive table.
    spark.sql(
      s"""
         |alter table dws.dws_city_tourist_msk_d add if not exists partition(day_id=$dayId)
         |""".stripMargin)

  }
}
