package cn.doitedu

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession

object Job01_GpsReferenceToGeohash {

  /**
   * Batch job: reads GPS reference points (Baidu bd09 coordinates) from MySQL,
   * converts each point to a 6-character geohash, keeps exactly one
   * (province, city, region) row per geohash, and writes the result into the
   * Hive dimension table `dim.geohash_area`.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .master("local")
      .appName("gps地理位置参考点加工成geohash码维表")
      .enableHiveSupport()
      // Small dimension-table build: a single shuffle partition keeps the
      // output in one file and avoids tiny-file fragmentation in Hive.
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()

    // Read the GPS reference-point table from MySQL. Rows missing either
    // coordinate cannot be geohashed, so filter them out up front.
    // NOTE(review): credentials are hard-coded; move them to a config file or
    // secret store before this runs outside a dev environment.
    val df = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://doitedu:3306/realtimedw")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("dbtable", "lv4_reference_gps")
      .option("user", "root")
      .option("password", "root")
      .load()
      .where("bd09_lng is not null  and bd09_lat is not null")

    df.createOrReplaceTempView("gps_reference")

    // UDF: (lat, lng) -> 6-character geohash string.
    val gps2Geo = (lat: Double, lng: Double) => {
      GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 6)
    }

    // Register the UDF so it is callable from the SQL below.
    spark.udf.register("gps_to_geo", gps2Geo)

    // Deduplicate: row_number() over each geohash, keep only rn = 1.
    // FIX: use INSERT OVERWRITE instead of INSERT INTO so that re-running the
    // job rebuilds the dimension table instead of appending duplicate rows.
    // (The returned DataFrame was previously bound to an unused val; dropped.)
    spark.sql(
      """
        |
        |insert overwrite table dim.geohash_area
        |select
        |    geo_hash,
        |    province,
        |    city,
        |    region
        |from (
        |    select
        |      gps_to_geo(bd09_lat,bd09_lng) as geo_hash,
        |      row_number() over(partition by gps_to_geo(bd09_lat,bd09_lng) order by province ) as rn ,
        |      province,
        |      city,
        |      region
        |    from gps_reference ) o
        |where rn=1
        |""".stripMargin)

    // FIX: release the session (and its underlying SparkContext) on completion;
    // the original never stopped it.
    spark.stop()
  }
}
