package etl

import java.util.Properties

import Configer.Config
import ch.hsr.geohash.GeoHash
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import util.GetBaiduBusiness

//Look up business-district info by latitude/longitude and persist it to the local database.
object LatLong2Buss {

  /**
   * Entry point: reads lat/long pairs from a parquet file, filters them to
   * China's bounding box, resolves each distinct point to a Baidu business
   * district, and writes (geohash, business) rows to a JDBC table.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Spark context configuration (local mode, serializer from app config).
    val conf = new SparkConf()
    conf.setAppName(s"${this.getClass.getName}")
    conf.setMaster("local[*]")
    conf.set("spark.serializer", Config.serializer)

    val sc = new SparkContext(conf)
    // Wrap the whole job so the SparkContext is released even when the
    // pipeline throws (the original only stopped it on the success path).
    try {
      val sqlContext = new SQLContext(sc)

      // Load source coordinates from the configured parquet path.
      val dataframe = sqlContext.read.parquet(Config.parquetPath)

      import sqlContext.implicits._

      // JDBC connection properties for the target database.
      val props = new Properties()
      props.setProperty("driver", Config.driver)
      props.setProperty("user", Config.user)
      props.setProperty("password", Config.password)

      // Keep only points inside the bounding box lat 3.52..53.33,
      // long 73.4..135.23 (roughly China), dedupe, then resolve each
      // point to its Baidu business district and key it by an
      // 8-character geohash before overwriting the JDBC table.
      dataframe.select("lat", "long")
        .filter("lat>=3.52 and lat<=53.33 and long >=73.4 and long<=135.230")
        .distinct()
        .map(row => {
          val lat = row.getAs[String]("lat")
          val long = row.getAs[String]("long")
          // NOTE(review): one remote Baidu lookup per distinct point —
          // consider batching or caching if the input grows large.
          val business = GetBaiduBusiness.getBaiduBuiss(lat, long).toString
          val geoHash = GeoHash.withCharacterPrecision(lat.toDouble, long.toDouble, 8).toBase32
          (geoHash, business)
        })
        .toDF("geoHash", "business")
        .write
        .mode(SaveMode.Overwrite)
        .jdbc(Config.url, Config.table, props)
    } finally {
      // Always release Spark resources, even on failure.
      sc.stop()
    }
  }
}
