package cn.doitedu.dwetl.area

import java.util.Properties

import ch.hsr.geohash.GeoHash
import cn.doitedu.commons.utils.SparkUtil
import org.apache.spark.sql.DataFrame

/**
 *
 * @Description
 * @Author hunter@doitedu.cn
 *
 */
object GeoDictGen {

  /**
   * Geohash character precision for the dictionary key.
   * 5 characters ≈ a 4.9km x 4.9km cell, which is the lookup
   * granularity used by downstream area-dimension joins.
   */
  val GeohashPrecision = 5

  def main(args: Array[String]): Unit = {

    val spark = SparkUtil.getSparkSession(GeoDictGen.getClass.getSimpleName)
    import spark.implicits._

    // Load the GPS-coordinate -> administrative-area dictionary from MySQL.
    // NOTE(review): credentials are hard-coded; move them to a config file /
    // environment variables before this leaves a dev environment.
    val props = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123456")
    val tmp = spark.read.jdbc("jdbc:mysql://localhost:3306/realtimedw", "geo_tmp", props)
    tmp.show(10, false)

    // Convert each (lat, lng) pair into a geohash string.
    // Expected columns: |BD09_LAT |BD09_LNG |province|city  |district|
    // NOTE(review): getAs[Double] will NPE on a NULL coordinate — assumes the
    // source table has no NULL lat/lng rows; confirm or add a filter upstream.
    val res: DataFrame = tmp.map(row => {
      val lat = row.getAs[Double]("BD09_LAT")
      val lng = row.getAs[Double]("BD09_LNG")
      val province = row.getAs[String]("province")
      val city = row.getAs[String]("city")
      val district = row.getAs[String]("district")

      // GeoHash API takes (latitude, longitude, precision) in that order.
      val geo = GeoHash.geoHashStringWithCharacterPrecision(lat, lng, GeohashPrecision)

      // Assemble the output row.
      (geo, province, city, district)
    }).toDF("geo", "province", "city", "district")

    // Alternative sinks kept for reference:
    //res.write.jdbc("jdbc:mysql://localhost:3306/dicts?characterEncoding=utf-8&useUnicode=true","geo_dict",props)
    //res.write.parquet("/user/hive/warehouse/dim.db/geo_dict")

    // Write the dictionary to the Hive table dim.geo_dict.
    // "overwrite" makes the job idempotent: the default ErrorIfExists mode
    // would throw TableAlreadyExistsException on every re-run.
    res.write.mode("overwrite").saveAsTable("dim.geo_dict")

    spark.close()
  }

}
