package com.shujia.utils

import com.shujia.utils.poly.Polygon
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf

abstract class SparkTool extends Logging {

  /** Partition date being processed; assigned from args(0) in `main` before `run` executes. */
  var day_id: String = _

  /**
   * UDF: distance between two points given as (longitude, latitude) pairs.
   * Delegates to [[Geography.calculateLength]]; units are whatever that helper
   * returns (NOTE(review): presumably metres — confirm against Geography).
   */
  val calculateLength: UserDefinedFunction =
    udf((longi1: Double, lati1: Double, longi2: Double, lati2: Double) =>
      Geography.calculateLength(longi1, lati1, longi2, lati2)
    )

  /** UDF: difference in seconds between two timestamps formatted as yyyyMMddHHmmss. */
  val dateDiff: UserDefinedFunction =
    udf((dateStr1: String, dateStr2: String) => DateUtils.diffDate(dateStr1, dateStr2))

  /**
   * UDF: distance between two grid cells identified by numeric grid-id strings.
   * NOTE(review): `toLong` throws NumberFormatException on non-numeric ids —
   * upstream data is assumed clean; confirm with producers of the grid column.
   */
  val calculateLengthByGrid: UserDefinedFunction =
    udf((grid1: String, grid2: String) =>
      Geography.calculateLength(grid1.toLong, grid2.toLong)
    )

  /**
   * UDF: whether the given grid id lies inside the polygon described by `boundary`.
   * NOTE(review): a new Polygon is parsed for every row; if the boundary is
   * constant for a job, consider pre-building and broadcasting it instead.
   */
  val isInBoundaryWithGrid: UserDefinedFunction =
    udf((grid: String, boundary: String) => new Polygon(boundary).contains(grid.toLong))

  /**
   * Program entry point.
   *
   * Expects the partition date (day_id) as args(0); builds a Hive-enabled
   * SparkSession, delegates to the subclass [[run]], and guarantees the
   * session is stopped even when the job throws.
   */
  def main(args: Array[String]): Unit = {
    log.info("=====================开始启动Spark程序=====================")

    if (args.isEmpty) {
      log.error("请指定分区日期day_id!")
      // Exit with a non-zero status so schedulers observe the failure
      // (previously this returned normally, reporting success).
      sys.exit(1)
    }

    day_id = args(0)

    log.info(s"当前正在处理的分区为:$day_id")

    // Build the session used to fuse location data on top of the ods layer.
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getName.replace("$", ""))
      .enableHiveSupport() // enable Hive support
      .getOrCreate()

    // Release cluster resources even if the subclass job fails.
    try {
      run(spark)
    } finally {
      spark.stop()
    }

    log.info("=====================Spark程序运行结束=====================")
  }

  /** Job body; implemented by each concrete Spark application. */
  def run(spark: SparkSession): Unit
}
