package com.shujia.dws

import com.shujia.common.SparkTool
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}

object DwsSpacetimeCompanionMskDay extends SparkTool {

  /**
   * Builds the day-level "spacetime companion" table: for each confirmed case's
   * location records, finds normal users whose records in the same county are
   * within 500 distance units and within one hour, then writes the matched
   * pairs plus the distinct companion mdn list to the DWS layer and registers
   * the corresponding Hive partitions.
   *
   * `day_id` and `calLengthWithLgLat` are inherited from [[SparkTool]].
   */
  override def run(spark: SparkSession): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Load the confirmed-case phone-number list from MySQL.
    // NOTE(review): JDBC credentials are hard-coded — move to configuration.
    val confirmMdnDF: DataFrame = spark
      .read
      .format("jdbc")
      .option("url", "jdbc:mysql://master:3306/crm")
      .option("dbtable", "confirm_mdn")
      .option("user", "root")
      .option("password", "123456")
      .load()

    // Collect the mdn list to the driver and broadcast it for filtering.
    // Fix: collect directly on the Dataset — the original went through .rdd
    // first, which adds a conversion for no benefit.
    // NOTE(review): .as[String] requires confirm_mdn to expose exactly one
    // string column (presumably `mdn`) — confirm the table schema.
    val confirmMdnSet: Set[String] = confirmMdnDF
      .as[String]
      .collect()
      .toSet

    val confirmMdnSetBro: Broadcast[Set[String]] = spark.sparkContext.broadcast(confirmMdnSet)

    // Load the DWI merged-location table, restricted to the target day.
    val mergeDF: DataFrame = spark.table("dwi.dwi_res_regn_mergelocation_msk_d").where($"day_id" === day_id)

    // Trajectories of confirmed cases. Columns are renamed with a c_ prefix so
    // the self-join below does not produce ambiguous column names.
    val confirmPosDF: DataFrame = mergeDF
      .filter(row => confirmMdnSetBro.value.contains(row.getAs[String]("mdn")))
      .select($"mdn", $"start_date", $"longi", $"lati", $"county_id")
      .withColumnRenamed("mdn", "c_mdn")
      .withColumnRenamed("start_date", "c_start_date")
      .withColumnRenamed("longi", "c_longi")
      .withColumnRenamed("lati", "c_lati")

    // Trajectories of everyone not on the confirmed list.
    val normalPosDF: DataFrame = mergeDF.filter(row => !confirmMdnSetBro.value.contains(row.getAs[String]("mdn")))

    // Join on county_id to bound the pair space (confirmed side is small, so it
    // is broadcast), then keep pairs close in space (< 500, units defined by
    // calLengthWithLgLat — presumably metres, TODO confirm) and in time (< 1h).
    val spacetimeDF = normalPosDF
      .join(confirmPosDF.hint("broadcast"), Seq("county_id"), "inner")
      // distance between the normal user's point and the confirmed case's point
      .withColumn("distance", calLengthWithLgLat($"longi", $"lati", $"c_longi", $"c_lati"))
      .where($"distance" < 500)
      // absolute gap between the two records' start timestamps, in seconds
      .withColumn("diff_time", abs(unix_timestamp($"start_date", "yyyyMMddHHmmss") - unix_timestamp($"c_start_date", "yyyyMMddHHmmss")))
      .where($"diff_time" < 60 * 60)
      // final column layout of the companion-pair table
      .select(
        $"mdn"
        , $"start_date"
        , $"end_date"
        , $"county_id"
        , $"longi"
        , $"lati"
        , $"bsid"
        , $"grid_id"
        , $"c_mdn"
        , $"c_start_date"
        , $"c_longi"
        , $"c_lati"
      )

    // Cache because the result is consumed twice (detail table + mdn list).
    spacetimeDF.cache()
    try {
      // Write the companion-pair detail table for this day.
      spacetimeDF
        .write
        .format("csv")
        .option("sep", ",")
        .mode(SaveMode.Overwrite)
        .save(s"/daas/motl/dws/dws_spacetime_companion_msk_d/day_id=$day_id")

      // Register the new partition with the metastore.
      spark.sql(
        s"""
           |alter table dws.dws_spacetime_companion_msk_d add if not exists partition(day_id=$day_id)
           |""".stripMargin)

      // Write the distinct list of companion mdns for this day.
      spacetimeDF
        .select($"mdn")
        .distinct()
        .write
        .format("csv")
        .option("sep", ",")
        .mode(SaveMode.Overwrite)
        .save(s"/daas/motl/dws/dws_spacetime_companion_mdn_msk_d/day_id=$day_id")

      // Register the new partition with the metastore.
      spark.sql(
        s"""
           |alter table dws.dws_spacetime_companion_mdn_msk_d add if not exists partition(day_id=$day_id)
           |""".stripMargin)
    } finally {
      // Fix: the original never unpersisted, leaking the cached data for the
      // remainder of the application. Release it once both writes are done.
      spacetimeDF.unpersist()
    }
  }
}
