package com.shujia.dws

import java.awt.geom.Point2D

import com.shujia.common.grid.Grid
import com.shujia.common.util.SparkTool
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}
import org.apache.spark.sql.{Dataset, Row, SaveMode, SparkSession}

object DWSStayPointMskDay extends SparkTool {

  /**
    * Daily stay-point job: collapses consecutive location records of the same
    * phone number (mdn) within the same grid into a single stay-point row with
    * the grid center coordinates and the stay duration in minutes, then writes
    * the result as a tab-separated CSV under the day's partition directory and
    * registers that partition in Hive.
    *
    * @param spark the session provided by [[SparkTool]]; `day_id` is assumed
    *              to be supplied by the same base class.
    */
  override def run(spark: SparkSession): Unit = {

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // UDF: longitude (X) of the center point of a grid, given its id as a string.
    val getLongi: UserDefinedFunction = udf((grid: String) => {
      val point: Point2D.Double = Grid.getCenter(grid.toLong)
      point.getX
    })

    // UDF: latitude (Y) of the center point of a grid.
    val getLati: UserDefinedFunction = udf((grid: String) => {
      val point: Point2D.Double = Grid.getCenter(grid.toLong)
      point.getY
    })

    // Shared window: one partition per phone number, ordered by record start time.
    // Used both for looking at the previous record and for the running group id.
    val byMdnOrderedByStart = Window.partitionBy($"mdn").orderBy($"start_date")

    /**
      * Read the merged-location table for the target day and build stay points.
      */
    val stayPoint: Dataset[Row] = spark
      .table("dwd.dwd_res_regn_mergelocation_msk_d")
      .where($"day_id" === day_id)
      // Split the combined time field into start and end components.
      // NOTE(review): assumes `start_time` holds "startTime,endTime". The
      // original code read index 1 as the start and index 0 as the end, which
      // inverts the pair and yields negative durations below; fixed to
      // (0)=start, (1)=end — confirm against the dwd table's field format.
      .withColumn("start_date", split($"start_time", ",")(0))
      .withColumn("end_date", split($"start_time", ",")(1))
      // Previous record's grid id for the same phone number ("" for the first row).
      .withColumn("be_grid", lag($"grid_id", 1, "") over byMdnOrderedByStart)
      // 0 when the user stayed in the same grid as the previous record, 1 on a change.
      .withColumn("flag", when($"be_grid" === $"grid_id", 0).otherwise(1))
      // Running sum of the change flag: all consecutive records in one grid
      // share the same `clazz` value, so they group together below.
      .withColumn("clazz", sum($"flag") over byMdnOrderedByStart)
      // One row per continuous stay: earliest start and latest end in the run.
      .groupBy($"mdn", $"grid_id", $"clazz", $"county_id")
      .agg(min($"start_date") as "grid_first_time", max($"end_date") as "grid_last_time")
      // Stay duration in minutes (timestamps are "yyyyMMddHHmmss" strings).
      .withColumn("duration", (unix_timestamp($"grid_last_time", "yyyyMMddHHmmss") - unix_timestamp($"grid_first_time", "yyyyMMddHHmmss")) / 60)
      // Grid center coordinates via the UDFs above.
      .withColumn("longi", getLongi($"grid_id"))
      .withColumn("lati", getLati($"grid_id"))
      // Final projection; coordinates and duration rounded to 4 decimals.
      .select($"mdn", round($"longi", 4), round($"lati", 4), $"grid_id", $"county_id", round($"duration", 4), $"grid_first_time", $"grid_last_time")

    // Write as tab-separated CSV into the day's partition directory,
    // replacing any previous output for the same day.
    stayPoint
      .write
      .format("csv")
      .option("sep", "\t")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dws/dws_staypoint_msk_d/day_id=$day_id")

    // Register the freshly written directory as a Hive partition (idempotent).
    spark.sql(s"alter table dws.dws_staypoint_msk_d add if not exists partition(day_id='$day_id') ")

  }
}


