package org.yonggan.dmp.tools

import java.util.Properties

import org.apache.commons.lang.StringUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.yonggan.dmp.conf.ConfigManager
import org.yonggan.dmp.utils.{BaiDuMapApi, GeoHashEx}

/**
  * 提取用户经纬度数据
  */
/**
  * Reads user longitude/latitude pairs from the preprocessed parquet output,
  * resolves each coordinate to a business-district label via the Baidu Map
  * API, and persists (geoHash, business) rows into the MySQL table
  * `dmp_lbs_business`.
  */
object Parquet2BusinessHandler {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("提取用户的经纬度")
      .setMaster("local[*]")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Load the preprocessed parquet data produced by an earlier stage.
    val baseDF = sqlContext.read.parquet(ConfigManager.PARQUET_OUT)

    import sqlContext.implicits._
    // Keep only coordinates inside mainland China's bounding box
    // (longitude 73.66..135.05, latitude 3.86..53.55).
    // FIX: the original called .stripMargin on a string with no '|' margin
    // characters, making it a silent no-op; margins added so the call does
    // what it says.
    val resultDF = baseDF.select("long", "lat")
      .filter(
        """|long <= 135.05 and long >= 73.66
           |and lat <= 53.55 and lat >= 3.86
           |""".stripMargin)
      .map(row => {
        // Longitude — column "long" was selected first.
        // FIX: the original comments had longitude/latitude swapped.
        val lng = row.getString(0)
        // Latitude — column "lat" was selected second.
        val lat = row.getString(1)
        // Business-district label from the Baidu reverse-geocoding API.
        // NOTE(review): one remote lookup per row — consider caching per
        // geohash cell if volume grows.
        val business = BaiDuMapApi.excutorQuery(lat, lng)

        (GeoHashEx.enCode(lng, lat)(), business)
      }).filter(f => StringUtils.isNotEmpty(f._2)).toDF("geoHash", "business")

    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "1234")
    prop.setProperty("driver", "com.mysql.jdbc.Driver")
    // Persist results. Default SaveMode is ErrorIfExists, so the job fails
    // if dmp_lbs_business already exists — TODO confirm this is intended.
    resultDF.write.jdbc("jdbc:mysql://localhost:3306/dmp?characterEncoding=utf8", "dmp_lbs_business", prop)

    // FIX: release cluster resources before the JVM exits.
    sc.stop()
  }
}
