package com.shujia.dwi

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import com.shujia.utils.{Geography, SparkTool}
import org.apache.spark.sql.expressions.{UserDefinedFunction, Window}

/**
  * Builds the merged-location table for one day by extracting user position
  * records from the base (ODS) data.
  *   1. Filters out over-speed (physically implausible) records.
  *   2. Masks sensitive data (phone numbers).
  */
object DwiResRegnMergelocationMskDay extends SparkTool {

  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    /**
      * UDF: distance in metres between two (longitude, latitude) points,
      * delegating to Geography.calculateLength.
      *
      * NOTE(review): assumes all four coordinate strings are non-null and
      * parseable doubles — a null or blank value makes the UDF throw.
      * Confirm against the upstream ODS data-quality guarantees.
      */
    val calculateLength: UserDefinedFunction =
      udf((longi: String, lati: String, lastLongi: String, lastLati: String) =>
        Geography.calculateLength(longi.toDouble, lati.toDouble, lastLongi.toDouble, lastLati.toDouble))

    // Every window function below uses the same spec: one partition per phone
    // number (mdn), ordered by the record's start time. Hoisted once so the
    // five usages cannot drift apart.
    val byMdnOrderedByStart = Window.partitionBy($"mdn").orderBy($"start_date")

    // Raw OIDD signalling table from Hive.
    val oiddDF: DataFrame = spark.table("ods.ods_oidd")

    val mergeDf: DataFrame = oiddDF
      // keep only the requested day's partition (day_id comes from SparkTool)
      .filter($"day_id" === day_id)
      .select($"mdn", $"start_time", $"county_id", $"longi", $"lati", $"bsid", $"grid_id")
      // drop exact duplicate records
      .distinct()
      // start_time carries two comma-separated timestamps; split them out.
      // NOTE(review): index 1 -> start, index 0 -> end looks inverted at first
      // glance — confirm the source field format is "<end>,<start>".
      .withColumn("start_date", split($"start_time", ",")(1))
      .withColumn("end_date", split($"start_time", ",")(0))
      // previous grid visited by the same phone number
      .withColumn("last_grid", lag($"grid_id", 1) over byMdnOrderedByStart)
      // 0 while the user stays in the same grid, 1 on a grid change
      .withColumn("flag", when($"grid_id" === $"last_grid", 0).otherwise(1))
      // running sum of grid changes => a stay-id per continuous stay in one grid
      .withColumn("clazz", sum("flag") over byMdnOrderedByStart)
      // collapse each continuous stay to a single record...
      .groupBy($"mdn", $"county_id", $"grid_id", $"longi", $"bsid", $"lati", $"clazz")
      // ...spanning from the first point's start to the last point's end
      .agg(min($"start_date") as "start_date", max($"end_date") as "end_date")
      // previous stay's start time for the same phone number
      .withColumn("last_date", lag($"start_date", 1) over byMdnOrderedByStart)
      // elapsed seconds since the previous stay; 1 for the first record so the
      // speed below stays finite and the row survives the speed filter
      .withColumn("diff_time", when($"last_date".isNull, 1)
        .otherwise(unix_timestamp($"start_date", "yyyyMMddHHmmss") - unix_timestamp($"last_date", "yyyyMMddHHmmss")))
      // previous stay's coordinates
      .withColumn("last_longi", lag($"longi", 1) over byMdnOrderedByStart)
      .withColumn("last_lati", lag($"lati", 1) over byMdnOrderedByStart)
      // metres travelled since the previous stay; 1 for the first record
      .withColumn("distance", when($"last_lati".isNull, 1)
        .otherwise(calculateLength($"longi", $"lati", $"last_longi", $"last_lati")))
      // speed in m/s, rounded to 3 decimals
      .withColumn("speed", round($"distance" / $"diff_time", 3))
      // discard physically implausible movement (> 340 m/s, faster than sound)
      .filter($"speed" <= 340)
      .select(
        // mask the phone number: MD5(mdn + salt), upper-cased
        upper(md5(concat($"mdn", lit("shujia")))) as "mdn",
        $"start_date",
        $"end_date",
        $"county_id",
        $"longi",
        $"lati",
        $"bsid",
        $"grid_id"
      )

    // Write the day's data as tab-separated text under the DWI-layer partition path.
    mergeDf.write
      .format("csv")
      .mode(SaveMode.Overwrite)
      .option("sep", "\t")
      .save(s"/daas/motl/dwi/dwi_res_regn_mergelocation_msk_d/day_id=$day_id")

    /**
      * spark-submit --master local --class com.shujia.dwi.DwiResRegnMergelocationMskDay --jars common-1.0.jar dwi-1.0.jar
      */

    // Register the freshly written partition with the Hive metastore so the
    // external table picks up the files written above.
    spark.sql(
      s"""
         |alter table dwi.dwi_res_regn_mergelocation_msk_d  add if not exists partition(day_id='$day_id')
         |
      """.stripMargin)
  }

}
