package com.shujia.dwi

import com.shujia.common.{Geography, Grid, SparkTool}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.awt.geom.Point2D

object DwiResRegnMergelocationMskDay extends SparkTool {

  /**
   * DWI job: merges a user's consecutive location records that fall in the
   * same grid, filters out physically impossible moves (over-speed between
   * adjacent grids), attaches grid center coordinates, masks the phone number
   * (md5) and writes the result as a CSV day partition of
   * dwi.dwi_res_regn_mergelocation_msk_d.
   *
   * Inherited from SparkTool (not visible here): `day_id`, and the UDFs
   * `calLengthWithGrid` (distance between two grid ids, presumably in meters —
   * TODO confirm units) and `getLgAndLatWithGrid` (grid center lon/lat).
   *
   * @param spark the active SparkSession supplied by SparkTool
   */
  override def run(spark: SparkSession): Unit = {
    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Per-user window ordered by start time; reused by every lag/sum below.
    val byMdnOrdered = Window.partitionBy($"mdn").orderBy($"start_date")

    // Load the OIDD records for the current day partition from Hive.
    val oiddDF: DataFrame = spark.table("ods.ods_oidd").where($"day_id" === day_id)

    oiddDF
      // Extract start/end timestamps from the comma-separated start_time field.
      // NOTE(review): both dates are split from `start_time`, with index 1 as
      // start and index 0 as end — looks like the column encodes "end,start";
      // confirm against the ODS schema (and that `end_time` is not intended).
      .withColumn("start_date", split($"start_time", ",")(1))
      .withColumn("end_date", split($"start_time", ",")(0))
      // Session-style clustering on the time axis: merge a user's consecutive
      // records that stay in the same grid.
      // Fetch the previous record's grid id per user, ordered by start time.
      .withColumn("last_grid_id", lag($"grid_id", 1) over byMdnOrdered)
      // flag = 0 when the grid is unchanged, 1 when the user entered a new
      // grid (including the first record, where last_grid_id is NULL).
      .withColumn("flag", when($"grid_id" === $"last_grid_id", 0).otherwise(1))
      // Running sum of flag yields a group id (grp) that is constant across a
      // stretch of records in the same grid. An explicit ROWS frame is used:
      // the default RANGE frame would give tied start_date rows the same
      // cumulative sum and could wrongly merge distinct grid visits.
      .withColumn("grp",
        sum($"flag") over byMdnOrdered.rowsBetween(Window.unboundedPreceding, Window.currentRow))
      // Collapse each same-grid stretch into one record spanning
      // [min(start_date), max(end_date)].
      .groupBy($"grp", $"mdn", $"grid_id", $"bsid", $"county_id")
      .agg(min($"start_date") as "start_date", max($"end_date") as "end_date")
      // Time gap between adjacent merged records of the same user (seconds).
      .withColumn("last_start_date", lag($"start_date", 1) over byMdnOrdered)
      .withColumn("diff_time",
        unix_timestamp($"start_date", "yyyyMMddHHmmss") - unix_timestamp($"last_start_date", "yyyyMMddHHmmss"))
      // Previous grid id again (the pre-groupBy value is gone) and the
      // distance between the two grids.
      .withColumn("last_grid_id", lag($"grid_id", 1) over byMdnOrdered)
      .withColumn("distance", calLengthWithGrid($"grid_id", $"last_grid_id"))
      // Speed between adjacent records; NULL when there is no predecessor or
      // when diff_time is 0 (Spark division by zero yields NULL).
      .withColumn("speed", round($"distance" / $"diff_time", 4))
      // Over-speed filter. Each user's first merged record has a NULL speed
      // (no predecessor), and `NULL < 300` is NULL, which would silently drop
      // it — so explicitly keep predecessor-less rows. Rows with a NULL speed
      // caused by diff_time = 0 (instantaneous jump) are still dropped.
      .where($"last_start_date".isNull || $"speed" < 300)
      // Resolve the grid id to its center point longitude/latitude.
      .withColumn("longi", round(getLgAndLatWithGrid($"grid_id")(0), 4))
      .withColumn("lati", round(getLgAndLatWithGrid($"grid_id")(1), 4))
      // Final projection; the phone number (mdn) is masked with md5.
      .select(
        md5($"mdn") as "mdn"
        , $"start_date"
        , $"end_date"
        , $"county_id"
        , $"longi"
        , $"lati"
        , $"bsid"
        , $"grid_id"
      )
      .write
      .format("csv")
      .option("sep", ",")
      .mode(SaveMode.Overwrite)
      .save(s"/daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=$day_id")

    // Register the freshly written day partition with the Hive metastore.
    spark.sql(
      s"""
         |alter table dwi.dwi_res_regn_mergelocation_msk_d add if not exists partition(day_id=$day_id)
         |""".stripMargin)
  }
}
