package com.doit.dophin.utils

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * @DATE 2022/3/22/14:58
 * @Author MDK
 * @Version 2021.2.2
 *
 *
 * One-off job: generates the geohash geographic-location reference dictionary table.
 * Prerequisite — create the dimension-layer database first:  create database dim;
 *
 * */
object GeohashReferenceDict {

  /**
   * One-off batch job: builds a geohash -> (province, city, region) reference
   * dictionary from the `t_md_areas` administrative-area table (read over JDBC)
   * and saves it as the Hive table `dim.ref_geo`.
   *
   * Prerequisite: the dimension-layer database must already exist
   * (`create database dim;`).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("gps参考坐标数据加工成geohash字典")
      //.master("local")
      .enableHiveSupport()
      .getOrCreate()

    // Load the JDBC connection settings from the classpath.
    // Fail fast with a clear message when db.properties is missing
    // (getResourceAsStream returns null in that case), and always close the
    // stream — Properties.load does not close it for us.
    val properties = new Properties()
    val confStream = GeohashReferenceDict.getClass.getClassLoader.getResourceAsStream("db.properties")
    require(confStream != null, "db.properties not found on the classpath")
    try properties.load(confStream) finally confStream.close()

    // Read the whole area table once; it is self-joined several times below,
    // so expose it as a temp view. createOrReplaceTempView (instead of
    // createTempView) avoids TempTableAlreadyExistsException on re-runs
    // within the same session.
    val df = spark.read.jdbc(properties.getProperty("url"), "t_md_areas", properties)
    df.createOrReplaceTempView("df")

    // UDF converting a GPS coordinate into a 5-character geohash cell.
    val gps2GeoHash = (lat: Double, lng: Double) => GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 5)
    spark.udf.register("gps2geo", gps2GeoHash)

    // Walk the area hierarchy via parentid self-joins:
    //   branch 1 — level-4 rows: l4 -> l3 (region) -> l2 (city) -> l1 (province)
    //   branch 2 — level-3 rows: l3 (region) -> l2 (city) -> l1 (province)
    // The outer GROUP BY deduplicates coordinates hashing to the same cell.
    val res = spark.sql(
      """
        |
        |select
        |   geohash,
        |   province,
        |   city,
        |   region
        |from (
        |    select
        |        gps2geo(l4.bd09_lat,l4.bd09_lng) as geohash,
        |        l1.areaname as province,
        |        l2.areaname as city,
        |        l3.areaname as region
        |    from df as l4 join df l3 on l4.parentid=l3.id and l4.level=4
        |               join df l2 on l3.parentid=l2.id
        |               join df l1 on l2.parentid=l1.id
        |
        |    UNION ALL
        |
        |    select
        |        gps2geo(l3.bd09_lat,l3.bd09_lng) as geohash,
        |        l1.areaname as province,
        |        l2.areaname as city,
        |        l3.areaname as region
        |    from df as l3 join df l2 on l3.parentid=l2.id and l3.level=3
        |               join df l1 on l2.parentid=l1.id
        |) o
        |group by geohash,province,city,region
        |
        |""".stripMargin)

    // Persist the dictionary to the dimension layer (errors if the table
    // already exists — acceptable for a one-off job).
    res.write.saveAsTable("dim.ref_geo")

    spark.close()
  }

}
