package com.test

import java.util.Properties

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession

/**
 * Looks up the area-dictionary row whose geohash column matches a fixed
 * latitude/longitude probe point, reading the dictionary table from MySQL.
 *
 * @author Orion
 */
object TestGetArea {
  // Probe point: lat 47.20975707229347, lon 123.65860307390196

  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .master("local[*]")
      .appName("session-split")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the SparkContext is released even if the JDBC read or query fails.
    try {
      val properties = new Properties()
      properties.setProperty("user", "root")
      properties.setProperty("password", "root")
      // Read the area dictionary table from MySQL.
      val df = session.read.jdbc(
        "jdbc:mysql://localhost:3306/cool?useUnicode=true&characterEncoding=utf8",
        "tb_area_dict",
        properties)

      /** Encodes a (lat, lon) pair as a 6-character base32 geohash string. */
      def mygeo(lat: Double, lnt: Double): String =
        GeoHash.withCharacterPrecision(lat, lnt, 6).toBase32

      // Registered for ad-hoc use in Spark SQL (e.g. hashing a column).
      session.udf.register("mygeo", mygeo _)

      df.createOrReplaceTempView("tb_area_dict")

      // Compute the probe geohash once on the driver instead of calling the
      // UDF inside the WHERE clause: a plain string literal lets Spark push
      // the equality filter down to the JDBC source. The value is a base32
      // geohash ([0-9a-z]), so inlining it into the SQL text is safe.
      val probeGeo = mygeo(47.20975707229347, 123.65860307390196)
      session.sql(
        s"""
          |select
          |*
          |from
          |tb_area_dict
          |where  geostr = '$probeGeo'
          |
          |""".stripMargin).show()
    } finally {
      session.stop()
    }
  }

}
