package etl

import java.util.Properties

import Configer.Configer
import ch.hsr.geohash.GeoHash
import org.apache.commons.lang.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SQLContext, SaveMode}
import org.apache.spark.{SparkConf, SparkContext}
import utils.GetBusiness

// Resolve business-district info for distinct lat/long points and persist it to MySQL, keyed by geohash.
object LatLongBusinessToMysql {

  /**
   * ETL entry point: reads log records (with string `lat`/`long` columns) from a
   * parquet source, keeps only coordinates inside mainland-China bounds, resolves
   * each distinct coordinate to its business-district string, and writes
   * (geohash, business) rows to the MySQL table `Area34`, overwriting it.
   *
   * @param args optional; args(0) overrides the parquet input path
   *             (defaults to the original hard-coded path for backward compatibility)
   */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be supplied on the command line; keep the old
    // hard-coded path as the default so existing invocations still work.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "E:\\小牛项目\\DMP广告项目34期\\资料PDF\\parquet"

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(s"${this.getClass.getName}")
      .set("spark.serializer", Configer.serializer)
    val sc = new SparkContext(conf)
    val sQLContext = new SQLContext(sc)

    // Ensure the SparkContext is released even when a stage fails.
    try {
      // Read the raw records.
      val dataFrame = sQLContext.read.parquet(inputPath)

      // Keep only coordinates within China's bounding box, deduplicate, then
      // resolve each point to its business-district description.
      // lat/long are stored as strings, hence the cast in the filter.
      val resultRDD: RDD[(String, String)] = dataFrame.select("lat", "long")
        .filter("cast(lat as double)>= 3.52 and cast(lat as double) <=53.33 and cast(long as double) >= 73.40 and cast(long as double) <= 135.20")
        .distinct()
        .map(row => {
          val lat = row.getAs[String]("lat")
          val long = row.getAs[String]("long")
          // Look up the business-district info for this coordinate.
          val business: String = GetBusiness.getBusiness(lat, long)
          // 8-character geohash (~38m x 19m cell) serves as the lookup key.
          val geoHash = GeoHash.withCharacterPrecision(lat.toDouble, long.toDouble, 8).toBase32
          (geoHash, business)
        })
        // Drop points for which no business district was found.
        .filter(pair => StringUtils.isNotEmpty(pair._2))

      // Persist to MySQL, replacing any previous contents of the table.
      import sQLContext.implicits._
      val props = new Properties()
      props.setProperty("driver", Configer.driver)
      props.setProperty("user", Configer.user)
      props.setProperty("password", Configer.password)
      resultRDD.toDF("geohash", "business")
        .write.mode(SaveMode.Overwrite)
        .jdbc(Configer.url, "Area34", props)
    } finally {
      // Release cluster resources regardless of success or failure.
      sc.stop()
    }
  }
}
