package com.shujia.utils

import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf

/**
  * Spark utility base class: common setup code shared by all Spark jobs.
  *
  * Extends [[org.apache.spark.internal.Logging]] so subclasses can emit logs via `log`.
  */
abstract class SparkTool extends Logging {

  /**
    * Custom Spark SQL functions (UDFs) shared by subclasses.
    */

  /**
    * UDF: distance between two (longitude, latitude) points given as strings.
    *
    * NOTE(review): `toDouble` will throw NumberFormatException on malformed or
    * null coordinates — assumes upstream data is already cleaned; confirm.
    */
  val calculateLength: UserDefinedFunction = udf((longi: String, lati: String, last_longi: String, last_lati: String) => {
    // Delegate the actual great-circle computation to the Geography helper.
    Geography.calculateLength(longi.toDouble, lati.toDouble, last_longi.toDouble, last_lati.toDouble)
  })

  /**
    * UDF: distance between two grid cells identified by their (string-encoded) Long ids.
    */
  val calculateLengthByGrid: UserDefinedFunction = udf((grid1: String, grid2: String) => {
    // Delegate to the overload of Geography.calculateLength that takes grid ids.
    Geography.calculateLength(grid1.toLong, grid2.toLong)
  })

  /** Partition date being processed; set from the first CLI argument in [[main]]. */
  var day_id: String = _

  /**
    * Job entry point: validates the date argument, builds the SparkSession
    * (with Hive metastore support) and hands it to the subclass's [[run]].
    *
    * @param args expects args(0) = partition date (day_id)
    */
  def main(args: Array[String]): Unit = {
    // 1. Require the date argument; log and exit cleanly if missing.
    if (args.isEmpty) {
      log.error("请传入时间参数")
    } else {
      day_id = args.head
      log.info(s"当前处理的分区：$day_id")

      // Use the concrete subclass's simple name (minus the object's trailing '$')
      // as the Spark application name.
      val simpleName: String = this.getClass.getSimpleName.replace("$", "")

      // 2. Build the Spark environment.
      val spark: SparkSession = SparkSession
        .builder()
        .appName(simpleName)
        // Enable Hive metastore support so SQL can read Hive tables.
        .enableHiveSupport()
        .getOrCreate()

      // 3. Delegate to the subclass's job logic.
      this.run(spark)
    }
  }

  /**
    * Job-specific logic implemented by each subclass.
    *
    * @param spark the fully-configured SparkSession built by [[main]]
    */
  def run(spark: SparkSession): Unit

}
