package cn.doitedu

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

import java.util.Properties

object Job01_GeoHashETL {

  /**
   * One-shot ETL job that builds the geohash → area dimension table.
   *
   * Reads the raw `tmp_area` table from MySQL over JDBC, computes a 6-character
   * geohash for each (lat, lng) pair, keeps exactly one
   * (province, city, region) row per geohash, and overwrites the Hive table
   * `dim.geohash_area` with the result.
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("geohash地理位置维表加工")
      .master("local")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // NOTE(review): credentials and the JDBC URL are hard-coded; they should
      // come from configuration / environment variables, not source code.
      val props = new Properties()
      props.setProperty("user", "root")
      props.setProperty("password", "root")
      val df = spark.read.jdbc("jdbc:mysql://doitedu:3306/realtimedw?useSSL=false", "tmp_area", props)

      df.createOrReplaceTempView("tmp")

      // 6-character geohash from latitude/longitude.
      // Boxed Doubles guard against NULL coordinates in the source table: with
      // primitive Double params a null lat/lng would throw on unboxing and fail
      // the whole job; here a null input simply yields a null geohash.
      val gps_geohash = udf((lat: java.lang.Double, lng: java.lang.Double) =>
        if (lat == null || lng == null) null
        else GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 6)
      )

      // Register the UDF under the name used in the SQL below.
      spark.udf.register("geo_hash", gps_geohash)

      // Deduplicate: keep one area row per geohash. The ORDER BY lists all
      // three attributes so the surviving row is deterministic even when a
      // geohash spans several rows within the same province.
      spark.sql(
        """
          |insert overwrite table dim.geohash_area
          |select
          |  geo_hash,
          |  province,
          |  city,
          |  region
          |from (
          |select
          |     geo_hash,
          |     province,
          |     city,
          |     region,
          |     row_number() over(partition by geo_hash order by province, city, region) as rn
          |from (
          |    select
          |        geo_hash(lat,lng) as geo_hash,
          |        province,
          |        city,
          |        region
          |    from tmp ) o1
          |) o2
          |where rn=1
          |""".stripMargin)
    } finally {
      // Release the local Spark context even if the job fails partway through.
      spark.stop()
    }
  }

}
