package cn.doitedu.dw.util

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession

import java.util.Properties

/**
 * Builds a geo-location dictionary: joins the MySQL area table with itself
 * (region -> city -> province), converts each region's BD-09 lat/lng into a
 * 5-character geohash, and writes the result as parquet for downstream joins.
 *
 * Expects a `db.properties` resource on the classpath with the JDBC
 * user/password for the `realtimedw` database.
 */
object GeoHashDict {
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal

    val spark: SparkSession = SparkSession.builder()
      // tiny dictionary job — 2 shuffle partitions is plenty
      .config("spark.sql.shuffle.partitions","2")
      .appName("地理位置知识库加工")
      .master("local")
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // JDBC credentials (user/password) come from the classpath resource.
    val props = new Properties()
    props.load(GeoHashDict.getClass.getClassLoader.getResourceAsStream("db.properties"))

    val df = spark.read.jdbc(
      "jdbc:mysql://hadoop102:3306/realtimedw?useUnicode=true&characterEncoding=utf8",
      "t_md_areas",
      props)
    df.createTempView("df")

    // Self-join the area hierarchy: level-3 rows are regions, whose parent is
    // a city, whose parent is a province. Coordinates are taken from the region.
    val df2 = spark.sql(
      """
        |select
        |province.areaname as province,
        |city.areaname as city,
        |region.areaname as region,
        |region.bd09_lng as lng,
        |region.bd09_lat as lat
        |
        |from df region join df city on region.parentid = city.id and region.level = 3
        |               join df province on city.parentid = province.id
        |""".stripMargin)

    df2.show(20,false)

    // GeoHash takes (latitude, longitude); precision 5 ≈ a 4.9km x 4.9km cell.
    // The library throws IllegalArgumentException for out-of-range coordinates
    // (lat outside [-90,90], lng outside [-180,180]), which dirty rows in the
    // source table would otherwise turn into a job-killing failure — map such
    // rows to null instead so they can be filtered downstream.
    val gps2geo = udf((lat: Double, lng: Double) => {
      try {
        GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 5)
      } catch {
        case NonFatal(_) => null
      }
    })

    val res = df2.select('province, 'city, 'region, gps2geo('lat, 'lng) as "geohash")

    // Overwrite so the job is re-runnable; without a mode, a second run fails
    // with "path already exists".
    res.write.mode("overwrite").parquet("dataware/data/geodict")

    spark.close()
  }

}
