package com.tour.dal

import com.shujia.common.util.Geography
import com.shujia.common.{Constants, SparkTool}
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql._

object CityTouristApp extends SparkTool {
  /**
    * Identify the tourists of each city for a single day and persist the result.
    *
    * A subscriber counts as a tourist in a city when, on the processed day,
    * their accumulated stay time in that city exceeds 180 (presumably minutes
    * — TODO confirm against the stay-point source) and their farthest stay
    * point lies more than 10 000 m from their home grid.
    *
    * @param spark ： spark entry point supplied by [[SparkTool]]
    */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Administrative-region dimension table; provides the county_id -> city_id mapping.
    val adminRegions: DataFrame = spark.table("dim.dim_admincode")

    // Stay-point fact table, restricted to the day being processed.
    val stayPoints: DataFrame = spark
      .table(s"${Constants.DWD_DATABASE_NAME}.${Constants.STAYPOINT_TABLE_NAME}")
      .where($"day_id" === day_id)

    // User-profile dimension table for the current month; carries each
    // subscriber's home county (resi_county_id) and home grid (resi_grid_id).
    val userProfiles: DataFrame = spark
      .table(s"${Constants.DIM_DATABASE_NAME}.${Constants.USERTAG_TABLE_NAME}")
      .where($"month_id" === month_id)

    // UDF: distance between two grid cells, in metres.
    // NOTE(review): a null grid id would NPE on .toLong — assumes both
    // columns are always populated; verify upstream guarantees.
    val gridDistance: UserDefinedFunction = udf((grid1: String, grid2: String) => {
      Geography.calculateLength(grid1.toLong, grid2.toLong)
    })

    // Both aggregations below are per subscriber per city.
    val byUserAndCity = Window.partitionBy($"mdn", $"city_id")

    /**
      * Compute stay time
      * Compute the farthest travel distance
      */

    // 1. Total stay duration of every subscriber in every city;
    //    keep only those staying longer than 180.
    val longStays = stayPoints
      // attach city_id via the (small, broadcast) admin-region table
      .join(adminRegions.hint("broadcast"), "county_id")
      .withColumn("sumDuration", sum($"duration").over(byUserAndCity))
      .where($"sumDuration" > 180)

    // 2. Farthest distance from the home grid; keep trips beyond 10 km.
    val tourists = longStays
      // join the profile table to obtain the home grid of each subscriber
      .join(userProfiles, "mdn")
      .withColumn("distance", gridDistance($"grid_id", $"resi_grid_id"))
      .withColumn("d_max_distance", max($"distance").over(byUserAndCity))
      .where($"d_max_distance" > 10000)

    // 3. Final projection (stay time scaled by 60, distance metres -> km),
    //    with duplicate rows removed.
    val result = tourists
      .select(
        $"mdn",
        $"resi_county_id" as "source_county_id",
        $"city_id" as "d_city_id",
        round($"sumDuration" / 60, 2) as "d_stay_time",
        round($"d_max_distance" / 1000, 2) as "d_max_distance"
      )
      .distinct()

    // Write as tab-separated text under the day's partition directory.
    // NOTE(review): the path concatenates "day_id=" directly after the constant
    // — assumes CITY_TOURIST_PATH_NAME ends with a path separator; confirm.
    result.write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"${Constants.CITY_TOURIST_PATH_NAME}day_id=$day_id")

    // Register the freshly written partition with the metastore.
    spark.sql(s"alter table ${Constants.DAL_TOUR_DATABASE_NAME}.${Constants.CITY_TOURIST_TABLE_NAME} add if not exists partition(day_id='$day_id')")
  }
}
