package com.zl.busschedule

import com.zl.busschedule.model.GPSData
import org.apache.hadoop.conf.Configuration
import org.apache.phoenix.spark.SparkSqlContextFunctions
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Entry point: reads one day's GPS track for a single bus from the Phoenix
 * table GJ_GPS_DATA, prints the line code of the first row, shows up to 100
 * rows, maps each row to a [[GPSData]], and prints the record count.
 */
object Main {

  /** ZooKeeper quorum Phoenix uses to locate the HBase cluster. */
  val zkUrl = "slave1,slave2,slave3:2181"

  /** Phoenix JDBC URL (kept public for callers; no longer used internally). */
  val url = "jdbc:phoenix:slave1,slave2,slave3:2181/hbase"

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("busSchedule").setMaster("local[4]")
    val sc = new SparkContext(sparkConf)
    try {
      val sqlContext = new SQLContext(sc)

      // Load the selected columns for one bus on one day. The hard-coded
      // predicate (city / bus number / date prefix) mirrors the original job.
      // NOTE(review): the previous Hadoop Configuration with a "url" key was
      // never passed anywhere, so it has been removed as dead code — the
      // zkUrl argument below is what actually wires up the connection.
      val df = new SparkSqlContextFunctions(sqlContext).phoenixTableAsDataFrame(
        "GJ_GPS_DATA",
        Seq("CITY", "BUSNO", "GPSDATETIME", "GPSLAT", "GPSLON", "LINE", "LINEDIRECTION"),
        Some("city = '520100' and busNo = '贵AU7028' and gpsDateTime like '20180710%'"),
        Some(zkUrl))

      // Guard against an empty result: first() would throw
      // NoSuchElementException if the query matched nothing.
      df.take(1).headOption match {
        case Some(firstRow) =>
          val lineCode = firstRow(5).toString // column 5 = LINE
          println(lineCode)
          df.show(100)

          // Map each row to the domain object. LINEDIRECTION (index 6) may be
          // NULL in the table; Option(...).orNull keeps the original
          // null-propagating behavior without an explicit null comparison.
          val rdd = df.rdd.map { row =>
            new GPSData(
              row(0).toString,
              row(1).toString,
              row(2).toString,
              row(3).toString,
              row(4).toString,
              row(5).toString,
              Option(row(6)).map(_.toString).orNull)
          }

          println(rdd.count())

        case None =>
          println("No GPS data matched the query")
      }
    } finally {
      // Always release Spark resources, even if the Phoenix read fails.
      sc.stop()
    }
  }

}
