package com.shujia.common

import com.shujia.util.poly.Polygon
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf

abstract class SparkTool extends Logging {
  // Partition date id, populated from the first CLI argument in main().
  // Kept as a public var named day_id for backward compatibility with subclasses.
  var day_id: String = _

  /**
   * UDF: given a longitude/latitude pair, test whether the point lies
   * inside the boundary polygon described by `boundary`.
   *
   * Null-safe: Spark hands `null` to the UDF for null column values, and the
   * original `.toDouble` call would throw a NullPointerException; a null
   * coordinate or boundary now simply yields `false`.
   */
  val isInBoundary: UserDefinedFunction = udf((longi: String, lati: String, boundary: String) => {
    if (longi == null || lati == null || boundary == null) false
    else new Polygon(boundary).contains(longi.toDouble, lati.toDouble)
  })

  /**
   * UDF: given a grid id, test whether the grid lies inside the boundary polygon.
   * Null-safe for the same reason as `isInBoundary`.
   */
  val isInBoundaryWithGrid: UserDefinedFunction = udf((grid: String, boundary: String) => {
    if (grid == null || boundary == null) false
    else new Polygon(boundary).contains(grid.toLong)
  })

  /**
   * Entry point shared by all Spark jobs extending this class.
   *
   * Expects args(0) to be the partition date; logs an error and exits
   * otherwise. Builds the SparkSession, then delegates to the subclass
   * `run` implementation, always stopping the session afterwards.
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      log.error("请指定分区日期！")
      return
    }
    day_id = args.head

    // Build the SparkSession with Hive support.
    val spark: SparkSession = SparkSession
      .builder()
      // Strip the trailing '$' Scala appends to companion-object class names.
      .appName(this.getClass.getSimpleName.replace("$", ""))
      .enableHiveSupport()
      .config("spark.sql.shuffle.partitions", "15")
      .getOrCreate()

    try {
      this.run(spark)
    } finally {
      // FIX: the session was never stopped, leaving the application context
      // (and its cluster resources) held until JVM exit. Always release it,
      // even when run() throws.
      spark.stop()
    }
  }

  /** Job-specific logic implemented by each concrete subclass. */
  def run(spark: SparkSession): Unit

}
