package com.shujia.common

import com.shujia.common.poly.Polygon
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf
import org.slf4j.Logger

import java.awt.geom.Point2D

/**
 * Base class for Spark batch jobs. Subclasses implement [[run]]; `main`
 * parses the partition date from `args`, builds a Hive-enabled
 * `SparkSession`, invokes `run`, and always stops the session afterwards.
 */
abstract class SparkTool extends Logging {
  val logger: Logger = log

  // Partition date (day_id); set from the first command-line argument in main().
  var day_id: String = _

  // UDF for Spark SQL: distance between two points given as lon/lat pairs.
  // NOTE(review): units/semantics defined by Geography.calculateLength — confirm there.
  val calLengthWithLgLat = udf((lg1: Double, lat1: Double, lg2: Double, lat2: Double) => {
    Geography.calculateLength(lg1, lat1, lg2, lat2)
  })

  // UDF for Spark SQL: distance between two points given as grid ids.
  val calLengthWithGrid = udf((grid_id1: Long, grid_id2: Long) => {
    Geography.calculateLength(grid_id1, grid_id2)
  })

  // UDF for Spark SQL: center coordinates of a grid cell, returned as
  // Array(x, y) — presumably (longitude, latitude); verify against Grid.getCenter.
  val getLgAndLatWithGrid = udf((grid_id: Long) => {
    val point: Point2D.Double = Grid.getCenter(grid_id)
    Array[Double](point.x, point.y)
  })

  // UDF for Spark SQL: whether a lon/lat point lies inside the polygon
  // described by the `boundary` string (format defined by Polygon's constructor).
  val isInBoundaryWithLgLat = udf((boundary: String, longi: Double, lati: Double) => {
    new Polygon(boundary).contains(longi, lati)
  })

  /**
   * Entry point. Expects the partition date (day_id) as the first argument;
   * logs an error and does nothing when no arguments are supplied.
   */
  def main(args: Array[String]): Unit = {
    // if/else expression instead of an early `return` — `return` in Scala is
    // implemented via a control-flow exception and is discouraged.
    if (args.isEmpty) {
      logger.error("请指定分区日志参数！")
    } else {
      // First argument is the partition date (day_id).
      day_id = args.head

      logger.info(s"正在处理的分区为:$day_id")

      val spark: SparkSession = SparkSession
        .builder()
        .appName(this.getClass.getSimpleName.replace("$", ""))
        .enableHiveSupport()
        .getOrCreate()

      // Ensure cluster resources are released even if run() throws.
      try {
        run(spark)
      } finally {
        spark.stop()
      }
    }
  }

  /** Job body implemented by concrete subclasses. */
  def run(spark: SparkSession): Unit
}
