package com.shujia.common.utils

import com.shujia.common.utils.poly.Polygon
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf
import org.slf4j.Logger


abstract class SparkMain extends Logging {

  val logger: Logger = log

  // Partition date ("day id"); populated from the first CLI argument in main().
  var dayId: String = _

  /**
   * UDF wrapping [[Geography.calculateLength]] so it can be applied to DataFrame
   * columns: distance between two (longitude, latitude) points given as strings.
   *
   * Null-safe: Spark passes `null` for null string cells, so a null in any of the
   * four columns yields SQL NULL (None) instead of failing the task with an NPE.
   * NOTE(review): assumes non-null values are parseable as doubles — a malformed
   * value still fails the task; confirm upstream data is cleansed.
   */
  val calculateLengthWithLongiLati: UserDefinedFunction =
    udf((longi: String, lati: String, last_longi: String, last_lati: String) =>
      for {
        lon1 <- Option(longi)
        lat1 <- Option(lati)
        lon2 <- Option(last_longi)
        lat2 <- Option(last_lati)
      } yield Geography.calculateLength(lon1.toDouble, lat1.toDouble, lon2.toDouble, lat2.toDouble)
    )

  /**
   * UDF: distance between two grid ids (string-encoded longs).
   * Null-safe: returns SQL NULL when either grid id column is null.
   */
  val calculateLengthWithGrid: UserDefinedFunction =
    udf((grid_id1: String, grid_id2: String) =>
      for {
        g1 <- Option(grid_id1)
        g2 <- Option(grid_id2)
      } yield Geography.calculateLength(g1.toLong, g2.toLong)
    )

  /**
   * UDF: time difference between two date strings, delegating to
   * [[DateUtil.diff_time]]. Null-safe: NULL if either input column is null.
   */
  val diffTime: UserDefinedFunction =
    udf((dateStr1: String, dateStr2: String) =>
      for {
        d1 <- Option(dateStr1)
        d2 <- Option(dateStr2)
      } yield DateUtil.diff_time(d1, d2)
    )

  /**
   * UDF: whether a grid id lies inside the given boundary polygon.
   * Null-safe: NULL if either column is null.
   *
   * NOTE(review): a new [[Polygon]] is parsed from `boundary` for every row;
   * if the boundary is constant per query, consider building it once on the
   * driver and closing over it instead.
   */
  val isInBoundaryWithGrid: UserDefinedFunction =
    udf((boundary: String, grid_id: String) =>
      for {
        b <- Option(boundary)
        g <- Option(grid_id)
      } yield new Polygon(b).contains(g.toLong)
    )

  /**
   * Entry point shared by all concrete jobs.
   *
   * Expects the partition date as the first CLI argument; logs an error and
   * exits if it is missing. Builds a Hive-enabled SparkSession, invokes the
   * subclass's [[run]], and always stops the session so cluster resources are
   * released even when the job throws.
   *
   * @param args CLI arguments; args(0) is the partition date (dayId)
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      logger.error("请指定分区日期！")
    } else {
      dayId = args.head

      // App name = concrete subclass name; strip the '$' of Scala `object` classes.
      val spark: SparkSession = SparkSession
        .builder()
        .appName(this.getClass.getSimpleName.replace("$", ""))
        .enableHiveSupport() // required: jobs read/write Hive tables
        .getOrCreate()

      try {
        logger.info("###################开始执行Spark任务####################")
        run(spark)
        logger.info("###################Spark任务运行完成####################")
      } finally {
        // Release executors/driver resources even if run() fails.
        spark.stop()
      }
    }
  }

  /** Job body, implemented by each concrete Spark job subclass. */
  def run(spark: SparkSession): Unit

}
