import com.zyl.common.SparkTools
import com.zyl.common.grid.Geography
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}


/**
  * Computes province-level tourists (省游客). A user counts as a tourist
  * in a province when both hold:
  *   1. total stay time in the province exceeds 3 hours; and
  *   2. travel distance from the residence grid exceeds 10 km.
  */


object AdsProvinceTouristMskD extends SparkTools {

  /**
    * Identifies province-level tourists and writes one CSV partition per day.
    *
    * A (mdn, province) pair qualifies when the user's total stay duration in
    * that province exceeds 180 minutes AND the farthest stay point is more
    * than 10 000 m from the user's residence grid.
    */
  override def run(spark: SparkSession): Unit = {

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Distance between two grid ids (meters, per Geography.calculateLength).
    // NOTE(review): assumes both grid ids are non-null numeric strings; a
    // null or non-numeric value would throw inside the UDF — confirm the
    // upstream stay-point table guarantees this.
    val calculateLength: UserDefinedFunction = udf((p1: String, p2: String) => {
      Geography.calculateLength(p1.toLong, p2.toLong)
    })

    val staypoint: DataFrame = spark.table(STAYPOINT_TABLE_NAME)
    val usertag: DataFrame = spark.table(DIM_USERTAG_TABLE_NAME)
    val admin: DataFrame = spark.table(DIM_ADMINCODE_TABLE_NAME)

    // All aggregates below are per user (mdn) within one province.
    val perUserProvince = Window.partitionBy($"mdn", $"prov_id")

    staypoint.join(usertag, "mdn")
      .join(admin, "county_id")
      // Total stay duration per user per province; rule 1: > 3 h (180 min).
      .withColumn("d_stay_time", sum($"duration") over perUserProvince)
      .where($"d_stay_time" > 180)
      // Distance from each stay-point grid to the user's residence grid.
      .withColumn("distance", calculateLength($"grid_id", $"resi_grid_id"))
      // FIX: the original used sum(distance), which adds up the distances of
      // every stay point and matches neither the column name nor rule 2
      // ("travel distance > 10 km"). The farthest stay point is the correct
      // measure, so aggregate with max.
      .withColumn("d_max_distance", max($"distance") over perUserProvince)
      .where($"d_max_distance" > 10000)
      .select(
        $"mdn",
        $"resi_county_id" as "source_county_id",
        $"prov_id" as "d_province_id",
        // Alias the derived columns so the schema is stable (the CSV payload
        // itself is unchanged — no header is written).
        round($"d_stay_time" / 60, 4) as "d_stay_time_hours",
        round($"d_max_distance" / 1000, 4) as "d_max_distance_km"
      )
      .distinct()
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"${ADS_PROVINCE_TOURIST_PATH}day_id=$day_id")

    // Register the freshly written partition with the Hive metastore so the
    // data is immediately queryable through the table.
    spark.sql(
      s"""
        |alter table $ADS_PROVINCE_TOURIST_TABLE_NAME add if not exists partition(day_id=$day_id)
      """.stripMargin)
  }
}
