package cn.doitedu.dw_etl

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.{DataFrame, SparkSession}

import java.util.Properties

/**
 * @author 涛哥
 * @nick_name "deep as the sea"
 * @contact qq:657270652 wx:doit_edu
 * @site www.doitedu.cn
 * @date 2021-12-14
 * @desc
 *
   create table geo_area(province string,city string,
   district string,geohash string)
   stored as orc
   tblproperties('orc.compress'='snappy');
 *
 */
object ReferenceGpsDict {

  /**
   * Entry point. Reads the raw administrative-area reference table from MySQL,
   * resolves each level-3/level-4 area to its province/city/district lineage,
   * converts the area's BD-09 coordinates to a 6-character geohash, and inserts
   * the result into the Hive table `dim.geo_area`.
   *
   * @param args args(0) = MySQL user, args(1) = MySQL password,
   *             args(2) = JDBC URL, args(3) = source table name
   */
  def main(args: Array[String]): Unit = {

    // Fail fast with a usage message when required CLI arguments are missing.
    if (args.length < 4) {
      println(
        """
          |
          |params not enough!
          |usage:
          |  args(0): mysql数据库登录用户名
          |  args(1): mysql数据库登录密码
          |  args(2): mysql数据库jdbc连接地址
          |  args(3): gps参考点坐标信息表名
          |
          |""".stripMargin)
      sys.exit(1)
    }

    val spark: SparkSession = SparkSession.builder()
      .appName("gps参考点转geohash码")
      //.config("spark.sql.shuffle.partitions","1")
      //.master("local")
      .enableHiveSupport()
      .getOrCreate()

    // Read the raw reference-coordinate table from MySQL.
    val props = new Properties()
    props.setProperty("user", args(0))
    props.setProperty("password", args(1))
    val df: DataFrame = spark.read.jdbc(args(2), args(3), props)

    df.createTempView("df")

    // UDF: (longitude, latitude) -> 6-char geohash.
    // Note the swap: the library signature is (latitude, longitude, precision).
    val gps2GeoHash = (lng: Double, lat: Double) => {
      GeoHash.geoHashStringWithCharacterPrecision(lat, lng, 6)
    }
    spark.udf.register("geohash", gps2GeoHash)

    // Pick level-3 and level-4 administrative units, walk parentid links up to
    // find their ancestors (district -> city -> province), and encode each
    // unit's GPS point as a geohash. The INSERT writes straight into Hive, so
    // no further action on the returned DataFrame is needed.
    spark.sql(
      """
        |insert into table dim.geo_area
        |-- 选取所有4级行政单位参考点
        |select
        |   l1.areaname as province,
        |   l2.areaname as city,
        |   l3.areaname as district,
        |   geohash(l4.bd09_lng,l4.bd09_lat) as geohash
        |
        |from df l4 join df l3 on l4.parentid=l3.id and l4.level=4
        |           join df l2 on l3.parentid=l2.id
        |           join df l1 on l2.parentid=l1.id
        |
        |union all
        |
        |-- 选取所有3级行政单位参考点
        |select
        |   l1.areaname as province,
        |   l2.areaname as city,
        |   l3.areaname as district,
        |   geohash(l3.bd09_lng,l3.bd09_lat) as geohash
        |
        |from df l3 join df l2 on l3.parentid=l2.id  and l3.level=3
        |           join df l1 on l2.parentid=l1.id
        |
        |""".stripMargin)

    spark.close()
  }

}
