package org.csdn.titan

import java.util.Properties

import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.SparkSession

object GeoHashDict {

  /**
   * Builds a district-level GeoHash dictionary from the `t_md_areas` area table.
   *
   * Reads the area hierarchy from MySQL, joins district (LEVEL=3) rows to their
   * city (LEVEL=2) and province (LEVEL=1) parents, computes a 5-character GeoHash
   * from each district's BD09 coordinates, and overwrites the result into the
   * `district_dicts` table in the same database.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getName)
      .master("local[*]")
      .getOrCreate()

    val url = "jdbc:mysql://localhost:3306/csdn_titan?characterEncoding=UTF-8"
    val table = "t_md_areas"

    // NOTE(review): credentials are hard-coded; acceptable for a local[*] demo only —
    // move to configuration/environment for anything beyond local development.
    val props = new Properties()
    props.setProperty("user", "root")
    props.setProperty("password", "123456")

    val df = spark.read.jdbc(url, table, props)
    df.createOrReplaceTempView("t_md_areas")
    import spark.implicits._ // needed for the Row => tuple encoder in .map and for .toDF

    // LEFT JOINs: a district keeps its row even when the city/province parent is missing,
    // in which case the joined name columns come back NULL.
    val resultdf = spark.sql("select t.ID,t.AREANAME as DISTRICTNAME,t.BD09_LNG,t.BD09_LAT,t.GCJ02_LNG,t.GCJ02_LAT,t.WGS84_LNG,t.WGS84_LAT,s.AREANAME as CITYNAME,p.AREANAME as PROVICENAME from t_md_areas t " +
      "LEFT JOIN t_md_areas s on t.PARENTID=s.ID and s.LEVEL=2 " +
      "LEFT JOIN t_md_areas p on s.PARENTID=p.ID and p.LEVEL=1 " +
      "where t.LEVEL=3").map { row =>
      val dcode = row.getAs[Int]("ID")
      val dname = row.getAs[String]("DISTRICTNAME")
      // NOTE(review): getAs[Double] unboxes and throws NullPointerException if a
      // coordinate column is NULL for some district — confirm the source table
      // guarantees non-null coordinates, or filter NULLs in the SQL above.
      val d_blng = row.getAs[Double]("BD09_LNG")
      val d_blat = row.getAs[Double]("BD09_LAT")
      val d_tlng = row.getAs[Double]("WGS84_LNG")
      val d_tlat = row.getAs[Double]("WGS84_LAT")
      val d_glng = row.getAs[Double]("GCJ02_LNG")
      val d_glat = row.getAs[Double]("GCJ02_LAT")
      val cname = row.getAs[String]("CITYNAME")
      val pname = row.getAs[String]("PROVICENAME")
      // GeoHash signature is (latitude, longitude, precision); 5 chars ~ 4.9km x 4.9km cell.
      val geocode = GeoHash.geoHashStringWithCharacterPrecision(d_blat, d_blng, 5)
      (geocode, dcode, dname, cname, pname, d_blng, d_blat, d_tlng, d_tlat, d_glng, d_glat)
    }.toDF("geocode", "districtcode", "districtname", "cityname", "provicename", "blng", "blat", "tlng", "tlat", "glng", "glat")

    //resultdf.show(10,false)

    // Overwrite mode drops and recreates district_dicts on every run.
    resultdf.write.mode("overwrite").jdbc(url, "district_dicts", props)

    spark.stop()
  }

}
