package com.shujia.dws

import com.shujia.common.utils.SparkTool
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}

object DwsCityTouristMskDay extends SparkTool {

  /**
   * Daily city-tourist (DWS) aggregation.
   *
   * A subscriber is flagged as a tourist of a city when, on the given day:
   *   1. the dwell time in that city (latest departure minus earliest arrival)
   *      exceeds 3 hours, and
   *   2. the farthest visited grid in that city lies more than 30 km from the
   *      subscriber's home grid.
   *
   * Output is written as CSV into the day_id partition directory and the
   * corresponding Hive partition is registered afterwards.
   *
   * NOTE(review): `day_id`, `diffTime` and `calculateLengthWithGrid` are
   * inherited from SparkTool; units assumed to be seconds / metres — this is
   * consistent with the threshold constants and the final unit conversions.
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Fused location records for the processing day.
    val locations: DataFrame = spark
      .table("dwi.dwi_res_regn_mergelocation_msk_d")
      .where($"day_id" === day_id)

    // Per-user profile for the processing day; supplies the home grid/county.
    val userProfiles: DataFrame = spark.table("dim.dim_usertag_msk_d").where($"day_id" === day_id)

    // Administrative-area dimension table — maps county_id to its city_id.
    val adminAreas: DataFrame = spark.table("dim.dim_admincode")

    // Window over one user's records within one city.
    val userCityWindow = Window.partitionBy($"mdn", $"city_id")
    // Window over one user's records for a (home county, visited city) pair.
    val userHomeCityWindow = Window.partitionBy($"mdn", $"resi_county_id", $"city_id")

    // Step 1: resolve each location record to a city; the dimension table is
    // small, so broadcast it to avoid a shuffle join.
    val withCity = locations
      .join(adminAreas.hint("broadcast"), "county_id")
      .select($"mdn", $"start_date", $"end_date", $"county_id", $"grid_id", $"city_id")

    // Step 2: dwell time per user per city = latest leave minus earliest entry;
    // keep only stays longer than 3 hours (threshold in seconds).
    val longStays = withCity
      .withColumn("min_start_date", min($"start_date") over userCityWindow)
      .withColumn("max_end_date", max($"end_date") over userCityWindow)
      .withColumn("d_stay_time", diffTime($"max_end_date", $"min_start_date"))
      .where($"d_stay_time" > 3 * 60 * 60)

    // Step 3: attach the home location via the user profile, measure the
    // home-grid-to-visited-grid distance, and keep users whose farthest grid
    // in the city is beyond 30 km (threshold in metres).
    val tourists = longStays
      .join(userProfiles, "mdn")
      .select($"mdn", $"county_id", $"grid_id", $"city_id", $"d_stay_time", $"resi_grid_id", $"resi_county_id")
      .withColumn("distance", calculateLengthWithGrid($"grid_id", $"resi_grid_id"))
      .withColumn("d_max_distance", max($"distance") over userHomeCityWindow)
      .where($"d_max_distance" > 30 * 1000)

    // Step 4: final projection — convert to hours / kilometres rounded to two
    // decimals, de-duplicate, and write the partition as CSV.
    tourists
      .select($"mdn"
        , $"resi_county_id" as "source_county_id"
        , $"city_id" as "d_city_id"
        , round($"d_stay_time" / 60 / 60, 2) as "d_stay_time"
        , round($"d_max_distance" / 1000, 2) as "d_max_distance"
      )
      .distinct()
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_city_tourist_msk_d/day_id=$day_id")

    // Register the freshly written partition in the Hive metastore.
    spark.sql(
      s"""
         |alter table dws.dws_city_tourist_msk_d  add if not exists partition(day_id='$day_id')
         |""".stripMargin)
  }
}
