package tag

import Configer.Config
import org.apache.commons.lang.StringUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object test {
  /** Entry point.
    *
    * Reads device-identifier records from the configured parquet path, keeps
    * rows that carry at least one non-empty identifier (raw, md5 or sha1
    * variants), restricts lat/long to a fixed bounding box, and prints each
    * latitude value. Runs locally on all cores.
    */
  def main(args: Array[String]): Unit = {
    // Spark configuration — local master, app named after this class.
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName(this.getClass.getName) // getName is already a String; interpolation was redundant
    conf.set("spark.serializer", Config.serializer)

    val sc = new SparkContext(conf)
    try {
      val sqlContext = new SQLContext(sc)
      val dataFrame = sqlContext.read.parquet(Config.parquetPath)
      dataFrame.show()

      // Keep rows where any of the device identifiers (raw / md5 / sha1) is present.
      dataFrame.filter(
        """
          |imei != "" or mac != "" or idfa != "" or openudid != "" or androidid != "" or
          |imeimd5 != "" or macmd5 != "" or idfamd5 != "" or openudidmd5 != "" or androididmd5 != "" or
          |imeisha1 != "" or macsha1 != "" or idfasha1 != "" or openudidsha1 != "" or androididsha1 != ""
        """.stripMargin)
        .select("lat", "long")
        // NOTE(review): bounding box looks like roughly mainland China — confirm intended range.
        .filter("lat>=3.52 and lat<=53.33 and long >=73.4 and long<=135.230")
        .map(row => row.getAs[String]("lat"))
        .foreach(println) // local[*] master, so executor-side println reaches this console
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}
