package cn.doitedu.dwh

import java.util.Properties

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.{SaveMode, SparkSession}


/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-06-08
 * @desc Processes the GPS-coordinate geographic reference-point table into a
 *       geohash-encoded location dictionary table.
 */
object GeoReferenceTableProcess {

  /**
   * Entry point. Loads the raw administrative-area table from MySQL, flattens
   * the parent-child area hierarchy into (province, city, region, lng, lat)
   * rows, converts each GPS coordinate to a 5-character geohash, and writes
   * the result to the Hive table `dim.area_dict_geo`.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .appName("geo地理位置知识表加工")
      // NOTE(review): hard-coded local master is for development only; remove
      // it and supply the master via spark-submit for cluster runs.
      .master("local")
      .enableHiveSupport()
      .config("spark.sql.shuffle.partitions", "10")
      .getOrCreate()

    // Load the source area table from MySQL.
    // SECURITY NOTE(review): credentials are hard-coded in source; move them
    // to a config file or environment variables before sharing this code.
    val props = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "ABC123abc.123")
    val df = spark.read.jdbc("jdbc:mysql://hdp01:3306/realtimedw", "t_md_areas", props)
    // Cached because the self-joins below scan the same table several times.
    df.cache()

    // Flatten the hierarchy: level-4 rows climb up to region/city/province,
    // and level-3 (region) rows contribute their own coordinates as well.
    df.createTempView("area")
    val joined = spark.sql(
      """
        |
        |select
        |   p.areaname as province,
        |   c.areaname as city,
        |   r.areaname as region,
        |   x.bd09_lng as lng,
        |   x.bd09_lat as lat
        |from area x join area r on x.parentid=r.id and x.level=4
        |            join area c on r.parentid=c.id
        |            join area p on c.parentid=p.id
        |union all
        |
        |select
        |   p.areaname as province,
        |   c.areaname as city,
        |   r.areaname as region,
        |   r.bd09_lng as lng,
        |   r.bd09_lat as lat
        |from area r join area c on r.parentid=c.id and r.level=3
        |            join area p on c.parentid=p.id
        |
        |
        |""".stripMargin)

    /**
     * Sample of `joined`:
     * +--------------+------+--------+------------------+------------------+
     * |province      |city  |region  |lng               |lat               |
     * +--------------+------+--------+------------------+------------------+
     * |广西壮族自治区|梧州市|万秀区  |111.3274622713231 |23.475965380942814|
     * |广西壮族自治区|梧州市|万秀区  |111.32453237603883|23.478569493101986|
     * |广西壮族自治区|梧州市|万秀区  |111.30547195007331|23.485394636734455|
     */

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Convert a GPS coordinate into a 5-character geohash.
    // Boxed java.lang.Double parameters keep the UDF null-safe: with Scala
    // primitive Double arguments, Spark substitutes 0.0 for SQL NULL, which
    // would silently yield a bogus geohash near (0, 0) for rows with missing
    // coordinates instead of NULL.
    val gps2geo = udf((lng: java.lang.Double, lat: java.lang.Double) =>
      if (lng == null || lat == null) null
      else GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 5)
    )

    // $"col" interpolator replaces the deprecated symbol-literal syntax ('col).
    val result = joined.select($"province", $"city", $"region", gps2geo($"lng", $"lat") as "geo")

    // Persist the dictionary table into the Hive warehouse.
    result.write.mode(SaveMode.Overwrite).saveAsTable("dim.area_dict_geo")

    spark.close()
  }
}
