package com.shujia.common.utils

import com.shujia.common.utils.poly.Polygon
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf

/**
 * Base class for Spark batch jobs.
 *
 * `main` handles the parts shared by every job: it parses the partition
 * argument, builds a Hive-enabled [[SparkSession]], and delegates the
 * job-specific logic to the abstract [[run]] method implemented by each
 * subclass. It also exposes a set of commonly used UDFs.
 */
abstract class SparkTool extends Logging {
  // Partition id being processed (first command-line argument).
  // Set by main before run is invoked; read by subclasses inside run.
  var day_id: String = _

  // UDF: distance between two (longitude, latitude) points,
  // delegating to Geography.calculateLength.
  val calculateLength: UserDefinedFunction = udf((lg1: Double, lat1: Double, lg2: Double, lat2: Double) => {
    Geography.calculateLength(lg1, lat1, lg2, lat2)
  })

  // UDF: distance between two grid ids (overload of Geography.calculateLength).
  val calculateLengthWithGrid: UserDefinedFunction = udf((grid_id1: Long, grid_id2: Long) => {
    Geography.calculateLength(grid_id1, grid_id2)
  })

  // UDF: whether the point (longi, lati) lies inside the polygon described
  // by the boundary string (parsed by the Polygon constructor).
  val isInScenicBoundary: UserDefinedFunction = udf((longi: Double, lati: Double, boundary: String) => {
    new Polygon(boundary).contains(longi, lati)
  })

  // UDF: difference between two date-time strings; exact semantics (unit,
  // expected format) are defined by DateUtils.diff_date_time.
  val diffTime: UserDefinedFunction = udf((dateStr1: String, dateStr2: String) => {
    DateUtils.diff_date_time(dateStr1, dateStr2)
  })

  /**
   * Shared entry point for every job.
   *
   * Fixes over the previous version:
   *  - the SparkSession is now stopped in a `finally` block, so cluster
   *    resources are released even when `run` throws (previously the
   *    session was never stopped at all);
   *  - the early `return` is replaced by an `if`/`else`, the idiomatic
   *    expression-oriented form.
   *
   * @param args expects the partition id as the first argument
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      log.error("请指定分区参数！")
    } else {
      day_id = args.head
      log.info(s"正在处理的分区为：$day_id")

      val spark: SparkSession = SparkSession
        .builder()
        .appName(this.getClass.getSimpleName.replace("$", ""))
        .enableHiveSupport()
        .getOrCreate()

      log.info("==========================开始执行Spark程序===============================")
      try {
        run(spark)
        log.info("==========================Spark程序执行完成===============================")
      } finally {
        // Always release executors / cluster resources, even on failure.
        spark.stop()
      }
    }
  }

  /**
   * Job-specific logic, implemented by each concrete subclass.
   *
   * @param spark the shared session built by [[main]]
   */
  def run(spark: SparkSession): Unit
}
